Compare commits

5 Commits: chore/fix. ... task-maste

| Author | SHA1 | Date |
|---|---|---|
| | fb15c2eaf7 | |
| | e8ceb08341 | |
| | e495b2b559 | |
| | e0d1d03f33 | |
| | 4a4bca905d | |
.changeset/fuzzy-words-count.md (new file, 8 lines)
@@ -0,0 +1,8 @@
+---
+"task-master-ai": patch
+---
+
+Fix scope-up/down prompts to include all required fields for better AI model compatibility
+
+- Added missing `priority` field to scope adjustment prompts to prevent validation errors with Claude-code and other models
+- Ensures generated JSON includes all fields required by the schema
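As a rough illustration of the schema side of this fix (a sketch, not the project's actual module; the field names besides `priority` and the sample values are invented), a required `priority` enum in a Zod schema rejects any generated JSON that omits the field, which is why the prompts now explicitly ask for it:

```js
import { z } from 'zod';

// A required enum (no .optional() / .nullable()) forces the generated JSON
// to always carry a priority value, so validation cannot fail on a missing field.
const adjustedTaskSchema = z.object({
	title: z.string().min(1),
	description: z.string().min(1),
	details: z.string().min(1),
	testStrategy: z.string().min(1),
	priority: z.enum(['low', 'medium', 'high']).describe('Task priority level')
});

// A model response without "priority" is now rejected up front.
const result = adjustedTaskSchema.safeParse({
	title: 'Add caching layer',
	description: 'Introduce an LRU cache for task lookups',
	details: 'Use lru-cache with a sensible max size',
	testStrategy: 'Unit tests covering cache hits and misses'
});
console.log(result.success); // false
```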
.changeset/tender-trams-refuse.md (new file, 8 lines)
@@ -0,0 +1,8 @@
+---
+"task-master-ai": patch
+---
+
+Fix MCP scope-up/down tools not finding tasks
+
+- Fixed task ID parsing in MCP layer - now correctly converts string IDs to numbers
+- scope_up_task and scope_down_task MCP tools now work properly
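A minimal sketch of the ID-parsing change described above (the real edit is in the scope-up/scope-down direct functions further down in this diff; the sample IDs are invented):

```js
// Before: comma-separated IDs from the MCP request stayed as strings ("42"),
// so lookups against the numeric task IDs stored in tasks.json found nothing.
const asStrings = '42,43'.split(',').map((taskId) => taskId.trim());

// After: each ID is converted to a number, matching how tasks are stored.
const asNumbers = '42,43'.split(',').map((taskId) => parseInt(taskId.trim(), 10));

console.log(asStrings); // [ '42', '43' ]
console.log(asNumbers); // [ 42, 43 ]
```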
.changeset/vast-sites-leave.md (new file, 11 lines)
@@ -0,0 +1,11 @@
+---
+"task-master-ai": patch
+---
+
+Improve AI provider compatibility for JSON generation
+
+- Fixed schema compatibility issues between Perplexity and OpenAI o3 models
+- Removed nullable/default modifiers from Zod schemas for broader compatibility
+- Added automatic JSON repair for malformed AI responses (handles cases like missing array values)
+- Perplexity now uses JSON mode for more reliable structured output
+- Post-processing handles default values separately from schema validation
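As a rough sketch of the repair step mentioned above (the malformed input is a made-up instance of the "missing array value" case; the real integration lives in the base provider's generateObject error handling later in this diff), the newly added `jsonrepair` dependency can patch such output before parsing:

```js
import { jsonrepair } from 'jsonrepair';

// Invented example of malformed model output: the array value after
// "dependencies" is missing, so JSON.parse would throw on the raw text.
const malformed = '{"title": "Set up CI", "dependencies": , "status": "pending"}';

// jsonrepair rewrites the text into syntactically valid JSON first.
const repaired = jsonrepair(malformed);
const task = JSON.parse(repaired);

console.log(task.status); // "pending"
```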
.github/scripts/tag-extension.mjs (vendored, 76 changed lines)
@@ -1,15 +1,37 @@
 #!/usr/bin/env node
 import assert from 'node:assert/strict';
 import { spawnSync } from 'node:child_process';
-import { readFileSync } from 'node:fs';
-import { join, dirname } from 'node:path';
+import { readFileSync, existsSync } from 'node:fs';
+import { join, dirname, resolve } from 'node:path';
 import { fileURLToPath } from 'node:url';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
 
+// Find the root directory by looking for package.json
+function findRootDir(startDir) {
+	let currentDir = resolve(startDir);
+	while (currentDir !== '/') {
+		if (existsSync(join(currentDir, 'package.json'))) {
+			// Verify it's the root package.json by checking for expected fields
+			try {
+				const pkg = JSON.parse(
+					readFileSync(join(currentDir, 'package.json'), 'utf8')
+				);
+				if (pkg.name === 'task-master-ai' || pkg.repository) {
+					return currentDir;
+				}
+			} catch {}
+		}
+		currentDir = dirname(currentDir);
+	}
+	throw new Error('Could not find root directory');
+}
+
+const rootDir = findRootDir(__dirname);
+
 // Read the extension's package.json
-const extensionDir = join(__dirname, '..', 'apps', 'extension');
+const extensionDir = join(rootDir, 'apps', 'extension');
 const pkgPath = join(extensionDir, 'package.json');
 
 let pkg;
@@ -22,7 +44,7 @@ try {
 }
 
 // Read root package.json for repository info
-const rootPkgPath = join(__dirname, '..', 'package.json');
+const rootPkgPath = join(rootDir, 'package.json');
 let rootPkg;
 try {
 	const rootPkgContent = readFileSync(rootPkgPath, 'utf8');
@@ -40,13 +62,51 @@ assert(rootPkg.repository, 'root package.json must have a repository field');
 const tag = `${pkg.name}@${pkg.version}`;
 
 // Get repository URL from root package.json
-const repoUrl = rootPkg.repository.url;
+// Get repository URL and clean it up for git ls-remote
+let repoUrl = rootPkg.repository.url || rootPkg.repository;
+if (typeof repoUrl === 'string') {
+	// Convert git+https://github.com/... to https://github.com/...
+	repoUrl = repoUrl.replace(/^git\+/, '');
+	// Ensure it ends with .git for proper remote access
+	if (!repoUrl.endsWith('.git')) {
+		repoUrl += '.git';
+	}
+}
 
-const { status, stdout, error } = spawnSync('git', ['ls-remote', repoUrl, tag]);
+console.log(`Checking remote repository: ${repoUrl} for tag: ${tag}`);
 
-assert.equal(status, 0, error);
+let gitResult = spawnSync('git', ['ls-remote', repoUrl, tag], {
+	encoding: 'utf8',
+	env: { ...process.env }
+});
 
-const exists = String(stdout).trim() !== '';
+if (gitResult.status !== 0) {
+	console.error('Git ls-remote failed:');
+	console.error('Exit code:', gitResult.status);
+	console.error('Error:', gitResult.error);
+	console.error('Stderr:', gitResult.stderr);
+	console.error('Command:', `git ls-remote ${repoUrl} ${tag}`);
+
+	// For CI environments, try using origin instead of the full URL
+	if (process.env.CI) {
+		console.log('Retrying with origin remote...');
+		gitResult = spawnSync('git', ['ls-remote', 'origin', tag], {
+			encoding: 'utf8'
+		});
+
+		if (gitResult.status !== 0) {
+			throw new Error(
+				`Failed to check remote for tag ${tag}. Exit code: ${gitResult.status}`
+			);
+		}
+	} else {
+		throw new Error(
+			`Failed to check remote for tag ${tag}. Exit code: ${gitResult.status}`
+		);
+	}
+}
+
+const exists = String(gitResult.stdout).trim() !== '';
 
 if (!exists) {
 	console.log(`Creating new extension tag: ${tag}`);
.github/workflows/pre-release.yml (vendored, 5 changed lines)
@@ -3,11 +3,12 @@ name: Pre-Release (RC)
 on:
   workflow_dispatch: # Allows manual triggering from GitHub UI/API
 
-concurrency: pre-release-${{ github.ref }}
+concurrency: pre-release-${{ github.ref_name }}
 
 jobs:
   rc:
     runs-on: ubuntu-latest
+    # Only allow pre-releases on non-main branches
+    if: github.ref != 'refs/heads/main'
     steps:
       - uses: actions/checkout@v4
         with:
.github/workflows/release.yml (vendored, 34 changed lines)
@@ -38,27 +38,27 @@ jobs:
         run: npm ci
         timeout-minutes: 2
 
-      - name: Exit pre-release mode and clean up
+      - name: Check pre-release mode
        run: |
-          echo "🔄 Ensuring we're not in pre-release mode for main branch..."
-
-          # Exit pre-release mode if we're in it
-          npx changeset pre exit || echo "Not in pre-release mode"
-
-          # Remove pre.json file if it exists (belt and suspenders approach)
-          if [ -f .changeset/pre.json ]; then
-            echo "🧹 Removing pre.json file..."
-            rm -f .changeset/pre.json
-          fi
-
-          # Verify the file is gone
-          if [ ! -f .changeset/pre.json ]; then
-            echo "✅ pre.json successfully removed"
-          else
-            echo "❌ Failed to remove pre.json"
+          set -euo pipefail
+          echo "🔍 Checking pre-release mode status..."
+          if [[ -f .changeset/pre.json ]]; then
+            echo "❌ ERROR: Main branch is in pre-release mode!"
+            echo ""
+            echo "Pre-release mode should only be used on feature branches, not main."
+            echo ""
+            echo "To fix this, run the following commands locally:"
+            echo "  npx changeset pre exit"
+            echo "  git add -u"
+            echo "  git commit -m 'chore: exit pre-release mode'"
+            echo "  git push origin main"
+            echo ""
+            echo "Then re-run this workflow."
+
             exit 1
           fi
 
+          echo "✅ Not in pre-release mode - proceeding with release"
+
       - name: Create Release Pull Request or Publish to npm
         uses: changesets/action@v1
         with:
@@ -71,8 +71,8 @@ export async function scopeDownDirect(args, log, context = {}) {
 		};
 	}
 
-	// Parse task IDs
-	const taskIds = id.split(',').map((taskId) => taskId.trim());
+	// Parse task IDs - convert to numbers as expected by scopeDownTask
+	const taskIds = id.split(',').map((taskId) => parseInt(taskId.trim(), 10));
 
 	log.info(
 		`Scoping down tasks: ${taskIds.join(', ')}, strength: ${strength}, research: ${research}`
@@ -90,10 +90,10 @@ export async function scopeDownDirect(args, log, context = {}) {
 				projectRoot,
 				commandName: 'scope-down',
 				outputType: 'mcp',
-				tag
+				tag,
+				research
 			},
-			'json', // outputFormat
-			research
+			'json' // outputFormat
 		);
 
 		// Restore normal logging
@@ -71,8 +71,8 @@ export async function scopeUpDirect(args, log, context = {}) {
 		};
 	}
 
-	// Parse task IDs
-	const taskIds = id.split(',').map((taskId) => taskId.trim());
+	// Parse task IDs - convert to numbers as expected by scopeUpTask
+	const taskIds = id.split(',').map((taskId) => parseInt(taskId.trim(), 10));
 
 	log.info(
 		`Scoping up tasks: ${taskIds.join(', ')}, strength: ${strength}, research: ${research}`
@@ -90,10 +90,10 @@ export async function scopeUpDirect(args, log, context = {}) {
 				projectRoot,
 				commandName: 'scope-up',
 				outputType: 'mcp',
-				tag
+				tag,
+				research
 			},
-			'json', // outputFormat
-			research
+			'json' // outputFormat
 		);
 
 		// Restore normal logging
package-lock.json (generated, 16 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "task-master-ai",
-  "version": "0.22.1-rc.0",
+  "version": "0.23.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "task-master-ai",
-      "version": "0.22.1-rc.0",
+      "version": "0.23.0",
       "license": "MIT WITH Commons-Clause",
       "workspaces": [
         "apps/*",
@@ -46,6 +46,7 @@
         "helmet": "^8.1.0",
         "inquirer": "^12.5.0",
         "jsonc-parser": "^3.3.1",
+        "jsonrepair": "^3.13.0",
         "jsonwebtoken": "^9.0.2",
         "lru-cache": "^10.2.0",
         "ollama-ai-provider": "^1.2.0",
@@ -84,7 +85,7 @@
       }
     },
     "apps/extension": {
-      "version": "0.22.3",
+      "version": "0.23.0",
       "devDependencies": {
         "@dnd-kit/core": "^6.3.1",
         "@dnd-kit/modifiers": "^9.0.0",
@@ -14942,6 +14943,15 @@
         "graceful-fs": "^4.1.6"
       }
     },
+    "node_modules/jsonrepair": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/jsonrepair/-/jsonrepair-3.13.0.tgz",
+      "integrity": "sha512-5YRzlAQ7tuzV1nAJu3LvDlrKtBFIALHN2+a+I1MGJCt3ldRDBF/bZuvIPzae8Epot6KBXd0awRZZcuoeAsZ/mw==",
+      "license": "ISC",
+      "bin": {
+        "jsonrepair": "bin/cli.js"
+      }
+    },
     "node_modules/jsonwebtoken": {
       "version": "9.0.2",
       "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz",
@@ -73,6 +73,7 @@
     "helmet": "^8.1.0",
     "inquirer": "^12.5.0",
     "jsonc-parser": "^3.3.1",
+    "jsonrepair": "^3.13.0",
     "jsonwebtoken": "^9.0.2",
     "lru-cache": "^10.2.0",
     "ollama-ai-provider": "^1.2.0",
@@ -1479,7 +1479,8 @@ function registerCommands(programInstance) {
 					projectRoot: taskMaster.getProjectRoot(),
 					tag,
 					commandName: 'scope-up',
-					outputType: 'cli'
+					outputType: 'cli',
+					research: options.research || false
 				};
 
 				const result = await scopeUpTask(
@@ -1605,7 +1606,8 @@ function registerCommands(programInstance) {
 					projectRoot: taskMaster.getProjectRoot(),
 					tag,
 					commandName: 'scope-down',
-					outputType: 'cli'
+					outputType: 'cli',
+					research: options.research || false
 				};
 
 				const result = await scopeDownTask(
@@ -23,14 +23,14 @@ import { displayAiUsageSummary } from '../ui.js';
 
 // Define the Zod schema for a SINGLE task object
 const prdSingleTaskSchema = z.object({
-	id: z.number().int().positive(),
+	id: z.number(),
 	title: z.string().min(1),
 	description: z.string().min(1),
-	details: z.string().nullable(),
-	testStrategy: z.string().nullable(),
-	priority: z.enum(['high', 'medium', 'low']).nullable(),
-	dependencies: z.array(z.number().int().positive()).nullable(),
-	status: z.string().nullable()
+	details: z.string(),
+	testStrategy: z.string(),
+	priority: z.enum(['high', 'medium', 'low']),
+	dependencies: z.array(z.number()),
+	status: z.string()
 });
 
 // Define the Zod schema for the ENTIRE expected AI response object
@@ -257,10 +257,15 @@ async function parsePRD(prdPath, tasksPath, numTasks, options = {}) {
 			return {
 				...task,
 				id: newId,
-				status: 'pending',
+				status: task.status || 'pending',
 				priority: task.priority || 'medium',
 				dependencies: Array.isArray(task.dependencies) ? task.dependencies : [],
-				subtasks: []
+				subtasks: [],
+				// Ensure all required fields have values (even if empty strings)
+				title: task.title || '',
+				description: task.description || '',
+				details: task.details || '',
+				testStrategy: task.testStrategy || ''
 			};
 		});
 
@@ -337,7 +337,7 @@ ${
 }
 
 Return a JSON object with a "subtasks" array. Each subtask should have:
-- id: Sequential number starting from 1
+- id: Sequential NUMBER starting from 1 (e.g., 1, 2, 3 - NOT "1", "2", "3")
 - title: Clear, specific title
 - description: Detailed description
 - dependencies: Array of dependency IDs as STRINGS (use format ["${task.id}.1", "${task.id}.2"] for siblings, or empty array [] for no dependencies)
@@ -345,7 +345,9 @@ Return a JSON object with a "subtasks" array. Each subtask should have:
 - status: "pending"
 - testStrategy: Testing approach
 
-IMPORTANT: Dependencies must be strings, not numbers!
+IMPORTANT:
+- The 'id' field must be a NUMBER, not a string!
+- Dependencies must be strings, not numbers!
 
 Ensure the JSON is valid and properly formatted.`;
 
@@ -358,14 +360,14 @@ Ensure the JSON is valid and properly formatted.`;
 			description: z.string().min(10),
 			dependencies: z.array(z.string()),
 			details: z.string().min(20),
-			status: z.string().default('pending'),
-			testStrategy: z.string().nullable().default('')
+			status: z.string(),
+			testStrategy: z.string()
 		})
 	)
 });
 
 const aiResult = await generateObjectService({
-	role: 'main',
+	role: context.research ? 'research' : 'main',
 	session: context.session,
 	systemPrompt,
 	prompt,
@@ -377,14 +379,21 @@ Ensure the JSON is valid and properly formatted.`;
 
 	const generatedSubtasks = aiResult.mainResult.subtasks || [];
 
+	// Post-process generated subtasks to ensure defaults
+	const processedGeneratedSubtasks = generatedSubtasks.map((subtask) => ({
+		...subtask,
+		status: subtask.status || 'pending',
+		testStrategy: subtask.testStrategy || ''
+	}));
+
 	// Update task with preserved subtasks + newly generated ones
-	task.subtasks = [...preservedSubtasks, ...generatedSubtasks];
+	task.subtasks = [...preservedSubtasks, ...processedGeneratedSubtasks];
 
 	return {
 		updatedTask: task,
 		regenerated: true,
 		preserved: preservedSubtasks.length,
-		generated: generatedSubtasks.length
+		generated: processedGeneratedSubtasks.length
 	};
 } catch (error) {
 	log(
@@ -457,6 +466,7 @@ ADJUSTMENT REQUIREMENTS:
 - description: Updated task description
 - details: Updated implementation details
 - testStrategy: Updated test strategy
+- priority: Task priority ('low', 'medium', or 'high')
 
 Ensure the JSON is valid and properly formatted.`;
 
@@ -501,14 +511,11 @@ async function adjustTaskComplexity(
 			.string()
 			.min(1)
 			.describe('Updated testing approach for the adjusted scope'),
-		priority: z
-			.enum(['low', 'medium', 'high'])
-			.optional()
-			.describe('Task priority level')
+		priority: z.enum(['low', 'medium', 'high']).describe('Task priority level')
 	});
 
 	const aiResult = await generateObjectService({
-		role: 'main',
+		role: context.research ? 'research' : 'main',
 		session: context.session,
 		systemPrompt,
 		prompt,
@@ -520,10 +527,16 @@ async function adjustTaskComplexity(
 
 	const updatedTaskData = aiResult.mainResult;
 
+	// Ensure priority has a value (in case AI didn't provide one)
+	const processedTaskData = {
+		...updatedTaskData,
+		priority: updatedTaskData.priority || task.priority || 'medium'
+	};
+
 	return {
 		updatedTask: {
 			...task,
-			...updatedTaskData
+			...processedTaskData
 		},
 		telemetryData: aiResult.telemetryData
 	};
@@ -1,4 +1,12 @@
-import { generateObject, generateText, streamText } from 'ai';
+import {
+	generateObject,
+	generateText,
+	streamText,
+	zodSchema,
+	JSONParseError,
+	NoObjectGeneratedError
+} from 'ai';
+import { jsonrepair } from 'jsonrepair';
 import { log } from '../../scripts/modules/utils.js';
 
 /**
@@ -206,8 +214,8 @@ export class BaseAIProvider {
 			const result = await generateObject({
 				model: client(params.modelId),
 				messages: params.messages,
-				schema: params.schema,
-				mode: 'auto',
+				schema: zodSchema(params.schema),
+				mode: params.mode || 'auto',
 				maxTokens: params.maxTokens,
 				temperature: params.temperature
 			});
@@ -226,6 +234,43 @@ export class BaseAIProvider {
 				}
 			};
 		} catch (error) {
+			// Check if this is a JSON parsing error that we can potentially fix
+			if (
+				NoObjectGeneratedError.isInstance(error) &&
+				JSONParseError.isInstance(error.cause) &&
+				error.cause.text
+			) {
+				log(
+					'warn',
+					`${this.name} generated malformed JSON, attempting to repair...`
+				);
+
+				try {
+					// Use jsonrepair to fix the malformed JSON
+					const repairedJson = jsonrepair(error.cause.text);
+					const parsed = JSON.parse(repairedJson);
+
+					log('info', `Successfully repaired ${this.name} JSON output`);
+
+					// Return in the expected format
+					return {
+						object: parsed,
+						usage: {
+							// Extract usage information from the error if available
+							inputTokens: error.usage?.promptTokens || 0,
+							outputTokens: error.usage?.completionTokens || 0,
+							totalTokens: error.usage?.totalTokens || 0
+						}
+					};
+				} catch (repairError) {
+					log(
+						'error',
+						`Failed to repair ${this.name} JSON: ${repairError.message}`
+					);
+					// Fall through to handleError with original error
+				}
+			}
+
 			this.handleError('object generation', error);
 		}
 	}
@@ -44,4 +44,21 @@ export class PerplexityAIProvider extends BaseAIProvider {
 			this.handleError('client initialization', error);
 		}
 	}
+
+	/**
+	 * Override generateObject to use JSON mode for Perplexity
+	 *
+	 * NOTE: Perplexity models (especially sonar models) have known issues
+	 * generating valid JSON, particularly with array fields. They often
+	 * generate malformed JSON like "dependencies": , instead of "dependencies": []
+	 *
+	 * The base provider now handles JSON repair automatically for all providers.
+	 */
+	async generateObject(params) {
+		// Force JSON mode for Perplexity as it may help with reliability
+		return super.generateObject({
+			...params,
+			mode: 'json'
+		});
+	}
 }