chore: prettier formatting

Eyal Toledano
2025-04-09 18:20:47 -04:00
parent 12519946b4
commit 4f68bf3b47
7 changed files with 487 additions and 376 deletions
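The reflowed code in the hunks below keeps single-quoted strings, wraps long calls at roughly 80 columns, and parenthesizes single arrow-function parameters (id => ... becomes (id) => ...). The Prettier configuration itself is not part of this excerpt; the prettier.config.js below is only a sketch of settings that would produce this style, and every option value is an assumption rather than the repository's actual file:

// prettier.config.js — hypothetical sketch, not taken from this commit
export default {
	printWidth: 80, // assumed; consistent with where the long call chains below are wrapped
	singleQuote: true, // assumed; the reformatted strings stay single-quoted (Prettier defaults to double quotes)
	trailingComma: 'none', // assumed; no trailing commas appear in the reflowed object literals
	arrowParens: 'always' // Prettier's default; matches the new (id) => style
};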

View File

@@ -50,13 +50,16 @@ export async function addTaskDirect(args, log, context = {}) {
// Check required parameters
if (!args.prompt && !isManualCreation) {
-log.error('Missing required parameters: either prompt or title+description must be provided');
+log.error(
+'Missing required parameters: either prompt or title+description must be provided'
+);
disableSilentMode();
return {
success: false,
error: {
code: 'MISSING_PARAMETER',
-message: 'Either the prompt parameter or both title and description parameters are required for adding a task'
+message:
+'Either the prompt parameter or both title and description parameters are required for adding a task'
}
};
}

View File

@@ -22,11 +22,28 @@ export function registerAddTaskTool(server) {
name: 'add_task',
description: 'Add a new task using AI',
parameters: z.object({
-prompt: z.string().optional().describe('Description of the task to add (required if not using manual fields)'),
-title: z.string().optional().describe('Task title (for manual task creation)'),
-description: z.string().optional().describe('Task description (for manual task creation)'),
-details: z.string().optional().describe('Implementation details (for manual task creation)'),
-testStrategy: z.string().optional().describe('Test strategy (for manual task creation)'),
+prompt: z
+.string()
+.optional()
+.describe(
+'Description of the task to add (required if not using manual fields)'
+),
+title: z
+.string()
+.optional()
+.describe('Task title (for manual task creation)'),
+description: z
+.string()
+.optional()
+.describe('Task description (for manual task creation)'),
+details: z
+.string()
+.optional()
+.describe('Implementation details (for manual task creation)'),
+testStrategy: z
+.string()
+.optional()
+.describe('Test strategy (for manual task creation)'),
dependencies: z
.string()
.optional()
@@ -35,11 +52,16 @@ export function registerAddTaskTool(server) {
.string()
.optional()
.describe('Task priority (high, medium, low)'),
-file: z.string().optional().describe('Path to the tasks file (default: tasks/tasks.json)'),
+file: z
+.string()
+.optional()
+.describe('Path to the tasks file (default: tasks/tasks.json)'),
projectRoot: z
.string()
.optional()
-.describe('Root directory of the project (default: current working directory)'),
+.describe(
+'Root directory of the project (default: current working directory)'
+),
research: z
.boolean()
.optional()

View File

@@ -791,20 +791,46 @@ function registerCommands(programInstance) {
.command('add-task')
.description('Add a new task using AI or manual input')
.option('-f, --file <file>', 'Path to the tasks file', 'tasks/tasks.json')
-.option('-p, --prompt <prompt>', 'Description of the task to add (required if not using manual fields)')
+.option(
+'-p, --prompt <prompt>',
+'Description of the task to add (required if not using manual fields)'
+)
.option('-t, --title <title>', 'Task title (for manual task creation)')
-.option('-d, --description <description>', 'Task description (for manual task creation)')
-.option('--details <details>', 'Implementation details (for manual task creation)')
-.option('--test-strategy <testStrategy>', 'Test strategy (for manual task creation)')
-.option('--dependencies <dependencies>', 'Comma-separated list of task IDs this task depends on')
-.option('--priority <priority>', 'Task priority (high, medium, low)', 'medium')
-.option('-r, --research', 'Whether to use research capabilities for task creation')
+.option(
+'-d, --description <description>',
+'Task description (for manual task creation)'
+)
+.option(
+'--details <details>',
+'Implementation details (for manual task creation)'
+)
+.option(
+'--test-strategy <testStrategy>',
+'Test strategy (for manual task creation)'
+)
+.option(
+'--dependencies <dependencies>',
+'Comma-separated list of task IDs this task depends on'
+)
+.option(
+'--priority <priority>',
+'Task priority (high, medium, low)',
+'medium'
+)
+.option(
+'-r, --research',
+'Whether to use research capabilities for task creation'
+)
.action(async (options) => {
const isManualCreation = options.title && options.description;
// Validate that either prompt or title+description are provided
if (!options.prompt && !isManualCreation) {
-console.error(chalk.red('Error: Either --prompt or both --title and --description must be provided'));
+console.error(
+chalk.red(
+'Error: Either --prompt or both --title and --description must be provided'
+)
+);
process.exit(1);
}
@@ -812,7 +838,9 @@ function registerCommands(programInstance) {
// Prepare dependencies if provided
let dependencies = [];
if (options.dependencies) {
-dependencies = options.dependencies.split(',').map(id => parseInt(id.trim(), 10));
+dependencies = options.dependencies
+.split(',')
+.map((id) => parseInt(id.trim(), 10));
}
// Create manual task data if title and description are provided
@@ -825,17 +853,27 @@ function registerCommands(programInstance) {
testStrategy: options.testStrategy || ''
};
-console.log(chalk.blue(`Creating task manually with title: "${options.title}"`));
+console.log(
+chalk.blue(`Creating task manually with title: "${options.title}"`)
+);
if (dependencies.length > 0) {
-console.log(chalk.blue(`Dependencies: [${dependencies.join(', ')}]`));
+console.log(
+chalk.blue(`Dependencies: [${dependencies.join(', ')}]`)
+);
}
if (options.priority) {
console.log(chalk.blue(`Priority: ${options.priority}`));
}
} else {
-console.log(chalk.blue(`Creating task with AI using prompt: "${options.prompt}"`));
+console.log(
+chalk.blue(
+`Creating task with AI using prompt: "${options.prompt}"`
+)
+);
if (dependencies.length > 0) {
-console.log(chalk.blue(`Dependencies: [${dependencies.join(', ')}]`));
+console.log(
+chalk.blue(`Dependencies: [${dependencies.join(', ')}]`)
+);
}
if (options.priority) {
console.log(chalk.blue(`Priority: ${options.priority}`));

View File

@@ -3506,7 +3506,9 @@ async function addTask(
'\n' +
chalk.white(`Status: ${getStatusWithColor(newTask.status)}`) +
'\n' +
-chalk.white(`Priority: ${chalk.keyword(getPriorityColor(newTask.priority))(newTask.priority)}`) +
+chalk.white(
+`Priority: ${chalk.keyword(getPriorityColor(newTask.priority))(newTask.priority)}`
+) +
'\n' +
(dependencies.length > 0
? chalk.white(`Dependencies: ${dependencies.join(', ')}`) + '\n'
@@ -3514,11 +3516,17 @@ async function addTask(
'\n' +
chalk.white.bold('Next Steps:') +
'\n' +
-chalk.cyan(`1. Run ${chalk.yellow(`task-master show ${newTaskId}`)} to see complete task details`) +
+chalk.cyan(
+`1. Run ${chalk.yellow(`task-master show ${newTaskId}`)} to see complete task details`
+) +
'\n' +
-chalk.cyan(`2. Run ${chalk.yellow(`task-master set-status --id=${newTaskId} --status=in-progress`)} to start working on it`) +
+chalk.cyan(
+`2. Run ${chalk.yellow(`task-master set-status --id=${newTaskId} --status=in-progress`)} to start working on it`
+) +
'\n' +
-chalk.cyan(`3. Run ${chalk.yellow(`task-master expand --id=${newTaskId}`)} to break it down into subtasks`),
+chalk.cyan(
+`3. Run ${chalk.yellow(`task-master expand --id=${newTaskId}`)} to break it down into subtasks`
+),
{ padding: 1, borderColor: 'green', borderStyle: 'round' }
)
);

View File

@@ -3,7 +3,10 @@
*/
import { jest } from '@jest/globals';
-import { sampleTasks, emptySampleTasks } from '../../tests/fixtures/sample-tasks.js';
+import {
+sampleTasks,
+emptySampleTasks
+} from '../../tests/fixtures/sample-tasks.js';
// Mock functions that need jest.fn methods
const mockParsePRD = jest.fn().mockResolvedValue(undefined);
@@ -658,11 +661,24 @@ describe('Commands Module', () => {
// Create a mock task manager with an addTask function that resolves to taskId 5
mockTaskManager = {
-addTask: jest.fn().mockImplementation((file, prompt, dependencies, priority, session, research, generateFiles, manualTaskData) => {
+addTask: jest
+.fn()
+.mockImplementation(
+(
+file,
+prompt,
+dependencies,
+priority,
+session,
+research,
+generateFiles,
+manualTaskData
+) => {
// Return the next ID after the last one in sample tasks
const newId = sampleTasks.tasks.length + 1;
return Promise.resolve(newId.toString());
-})
+}
+)
};
// Create a simplified version of the add-task action function for testing
@@ -676,13 +692,15 @@ describe('Commands Module', () => {
// Validate that either prompt or title+description are provided
if (!prompt && !isManualCreation) {
-throw new Error('Either --prompt or both --title and --description must be provided');
+throw new Error(
+'Either --prompt or both --title and --description must be provided'
+);
}
// Prepare dependencies if provided
let dependencies = [];
if (options.dependencies) {
-dependencies = options.dependencies.split(',').map(id => id.trim());
+dependencies = options.dependencies.split(',').map((id) => id.trim());
}
// Create manual task data if title and description are provided
@@ -716,7 +734,9 @@ describe('Commands Module', () => {
await expect(async () => {
await addTaskAction(undefined, options);
-}).rejects.toThrow('Either --prompt or both --title and --description must be provided');
+}).rejects.toThrow(
+'Either --prompt or both --title and --description must be provided'
+);
});
test('should handle short-hand flag -p for prompt', async () => {
@@ -782,7 +802,8 @@ describe('Commands Module', () => {
{ session: process.env },
false,
null, // Generate files parameter
-{ // Manual task data
+{
+// Manual task data
title: 'Login Component',
description: 'Create a reusable login form',
details: 'Implementation details here',

View File

@@ -10,7 +10,10 @@
*/
import { jest } from '@jest/globals';
-import { sampleTasks, emptySampleTasks } from '../../../fixtures/sample-tasks.js';
+import {
+sampleTasks,
+emptySampleTasks
+} from '../../../fixtures/sample-tasks.js';
// Mock EVERYTHING
const mockAddTaskDirect = jest.fn();
@@ -18,9 +21,9 @@ jest.mock('../../../../mcp-server/src/core/task-master-core.js', () => ({
addTaskDirect: mockAddTaskDirect
}));
-const mockHandleApiResult = jest.fn(result => result);
+const mockHandleApiResult = jest.fn((result) => result);
const mockGetProjectRootFromSession = jest.fn(() => '/mock/project/root');
-const mockCreateErrorResponse = jest.fn(msg => ({
+const mockCreateErrorResponse = jest.fn((msg) => ({
success: false,
error: { code: 'ERROR', message: msg }
}));
@@ -29,7 +32,10 @@ jest.mock('../../../../mcp-server/src/tools/utils.js', () => ({
getProjectRootFromSession: mockGetProjectRootFromSession,
handleApiResult: mockHandleApiResult,
createErrorResponse: mockCreateErrorResponse,
-createContentResponse: jest.fn(content => ({ success: true, data: content })),
+createContentResponse: jest.fn((content) => ({
+success: true,
+data: content
+})),
executeTaskMasterCommand: jest.fn()
}));
@@ -40,14 +46,16 @@ const mockZod = {
boolean: jest.fn(() => mockZod),
optional: jest.fn(() => mockZod),
describe: jest.fn(() => mockZod),
-_def: { shape: () => ({
+_def: {
+shape: () => ({
prompt: {},
dependencies: {},
priority: {},
research: {},
file: {},
projectRoot: {}
-})}
+})
+}
};
jest.mock('zod', () => ({
@@ -68,16 +76,21 @@ const registerAddTaskTool = (server) => {
const { log, reportProgress, session } = context;
try {
-log.info && log.info(`Starting add-task with args: ${JSON.stringify(args)}`);
+log.info &&
+log.info(`Starting add-task with args: ${JSON.stringify(args)}`);
// Get project root
const rootFolder = mockGetProjectRootFromSession(session, log);
// Call addTaskDirect
-const result = mockAddTaskDirect({
+const result = mockAddTaskDirect(
+{
...args,
projectRoot: rootFolder
-}, log, { reportProgress, session });
+},
+log,
+{ reportProgress, session }
+);
// Handle result
return mockHandleApiResult(result, log);
@@ -136,7 +149,7 @@ describe('MCP Tool: add-task', () => {
// Create mock server
mockServer = {
-addTool: jest.fn(config => {
+addTool: jest.fn((config) => {
executeFunction = config.execute;
})
};
@@ -220,10 +233,7 @@ describe('MCP Tool: add-task', () => {
expect(mockAddTaskDirect).toHaveBeenCalled();
// Verify handleApiResult was called with error response
-expect(mockHandleApiResult).toHaveBeenCalledWith(
-errorResponse,
-mockLogger
-);
+expect(mockHandleApiResult).toHaveBeenCalledWith(errorResponse, mockLogger);
});
test('should handle unexpected errors', () => {
@@ -261,10 +271,13 @@ describe('MCP Tool: add-task', () => {
};
// Test with research=true
-executeFunction({
+executeFunction(
+{
...validArgs,
research: true
-}, mockContext);
+},
+mockContext
+);
// Verify addTaskDirect was called with research=true
expect(mockAddTaskDirect).toHaveBeenCalledWith(
@@ -279,10 +292,13 @@ describe('MCP Tool: add-task', () => {
jest.clearAllMocks();
// Test with research=false
-executeFunction({
+executeFunction(
+{
...validArgs,
research: false
-}, mockContext);
+},
+mockContext
+);
// Verify addTaskDirect was called with research=false
expect(mockAddTaskDirect).toHaveBeenCalledWith(
@@ -303,15 +319,18 @@ describe('MCP Tool: add-task', () => {
};
// Test different priority values
-['high', 'medium', 'low'].forEach(priority => {
+['high', 'medium', 'low'].forEach((priority) => {
// Reset mocks
jest.clearAllMocks();
// Execute with specific priority
-executeFunction({
+executeFunction(
+{
...validArgs,
priority
-}, mockContext);
+},
+mockContext
+);
// Verify addTaskDirect was called with correct priority
expect(mockAddTaskDirect).toHaveBeenCalledWith(