feat: Implement stale validation cleanup and improve GitHub issue handling

- Added a scheduled task to clean up stale validation entries every hour, preventing memory leaks.
- Enhanced the `getAllValidations` function to read validation files in parallel for improved performance.
- Updated the `fetchLinkedPRs` function to use `spawn` for safer execution of GitHub CLI commands, mitigating shell injection risks.
- Modified event handling in the GitHub issues view to read the validation model from the event payload rather than from captured component state, ensuring consistency and avoiding stale-closure issues.
- Added a `model` property to the issue validation completion event so it records which model performed the validation.
Kacper
2025-12-23 22:21:08 +01:00
parent 0c9f05ee38
commit d1418aa054
7 changed files with 85 additions and 27 deletions

View File

@@ -48,6 +48,7 @@ import { createClaudeRoutes } from './routes/claude/index.js';
import { ClaudeUsageService } from './services/claude-usage-service.js';
import { createGitHubRoutes } from './routes/github/index.js';
import { createContextRoutes } from './routes/context/index.js';
+import { cleanupStaleValidations } from './routes/github/routes/validation-common.js';
// Load environment variables
dotenv.config();
@@ -123,6 +124,15 @@ const claudeUsageService = new ClaudeUsageService();
console.log('[Server] Agent service initialized');
})();
+// Run stale validation cleanup every hour to prevent memory leaks from crashed validations
+const VALIDATION_CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
+setInterval(() => {
+const cleaned = cleanupStaleValidations();
+if (cleaned > 0) {
+console.log(`[Server] Cleaned up ${cleaned} stale validation entries`);
+}
+}, VALIDATION_CLEANUP_INTERVAL_MS);
// Mount API routes - health is unauthenticated for monitoring
app.use('/api/health', createHealthRoutes());
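The cleanupStaleValidations implementation itself lives in validation-common.ts and is not shown in this commit. Purely as an illustration, assuming in-flight validations are tracked in an in-memory map with a start timestamp (which the diff does not confirm), a TTL-based sweep could look roughly like this; every name below is hypothetical:

const STALE_VALIDATION_TTL_MS = 60 * 60 * 1000; // entries older than one hour count as stale

interface ActiveValidation {
  issueNumber: number;
  startedAt: number; // epoch milliseconds recorded when the validation began
}

// Hypothetical registry of in-flight validations; a crashed validation never
// removes its own entry, which is what the hourly sweep compensates for.
const activeValidations = new Map<string, ActiveValidation>();

export function cleanupStaleValidations(): number {
  const now = Date.now();
  let cleaned = 0;
  for (const [key, entry] of activeValidations) {
    if (now - entry.startedAt > STALE_VALIDATION_TTL_MS) {
      activeValidations.delete(key);
      cleaned++;
    }
  }
  return cleaned;
}

Returning a count is the one detail the hunk above does pin down: the setInterval callback logs only when the return value is positive.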

View File

@@ -67,31 +67,33 @@ export async function readValidation(
* @returns Array of stored validations
*/
export async function getAllValidations(projectPath: string): Promise<StoredValidation[]> {
-const validations: StoredValidation[] = [];
const validationsDir = getValidationsDir(projectPath);
try {
const dirs = await secureFs.readdir(validationsDir, { withFileTypes: true });
-for (const dir of dirs) {
-if (dir.isDirectory()) {
+// Read all validation files in parallel for better performance
+const promises = dirs
+.filter((dir) => dir.isDirectory())
+.map((dir) => {
const issueNumber = parseInt(dir.name, 10);
if (!isNaN(issueNumber)) {
-const validation = await readValidation(projectPath, issueNumber);
-if (validation) {
-validations.push(validation);
-}
+return readValidation(projectPath, issueNumber);
}
-}
-}
+return Promise.resolve(null);
+});
+const results = await Promise.all(promises);
+const validations = results.filter((v): v is StoredValidation => v !== null);
+// Sort by issue number
+validations.sort((a, b) => a.issueNumber - b.issueNumber);
+return validations;
} catch {
// Directory doesn't exist
return [];
}
-// Sort by issue number
-validations.sort((a, b) => a.issueNumber - b.issueNumber);
-return validations;
}
/**
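The TypeScript detail worth noting in the new getAllValidations is the type-guard filter, which narrows the Promise.all result from (StoredValidation | null)[] to StoredValidation[]. A self-contained sketch of the same pattern, with an illustrative stand-in for readValidation:

type StoredValidation = { issueNumber: number };

// Stand-in for readValidation: resolves to null when nothing is stored.
async function readMaybe(issueNumber: number): Promise<StoredValidation | null> {
  return issueNumber % 2 === 0 ? { issueNumber } : null;
}

async function readAll(issueNumbers: number[]): Promise<StoredValidation[]> {
  const results = await Promise.all(issueNumbers.map(readMaybe));
  // The predicate `(v): v is StoredValidation` is what lets TypeScript drop the nulls.
  return results.filter((v): v is StoredValidation => v !== null);
}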

View File

@@ -2,6 +2,7 @@
* POST /list-issues endpoint - List GitHub issues for a project
*/
+import { spawn } from 'child_process';
import type { Request, Response } from 'express';
import { execAsync, execEnv, getErrorMessage, logError } from './common.js';
import { checkGitHubRemote } from './check-github-remote.js';
@@ -109,17 +110,48 @@ async function fetchLinkedPRs(
}`;
try {
-const { stdout } = await execAsync(`gh api graphql -f query='${query}'`, {
-cwd: projectPath,
-env: execEnv,
+// Use spawn with stdin to avoid shell injection vulnerabilities
+const response = await new Promise<Record<string, unknown>>((resolve, reject) => {
+const gh = spawn('gh', ['api', 'graphql', '-f', 'query=-'], {
+cwd: projectPath,
+env: execEnv,
+});
+let stdout = '';
+let stderr = '';
+gh.stdout.on('data', (data: Buffer) => (stdout += data.toString()));
+gh.stderr.on('data', (data: Buffer) => (stderr += data.toString()));
+gh.on('close', (code) => {
+if (code !== 0) {
+return reject(new Error(`gh process exited with code ${code}: ${stderr}`));
+}
+try {
+resolve(JSON.parse(stdout));
+} catch (e) {
+reject(e);
+}
+});
+gh.stdin.write(query);
+gh.stdin.end();
});
-const response = JSON.parse(stdout);
-const repoData = response?.data?.repository;
+const repoData = (response?.data as Record<string, unknown>)?.repository as Record<
+string,
+unknown
+> | null;
if (repoData) {
batch.forEach((issueNum, idx) => {
-const issueData = repoData[`issue${idx}`];
+const issueData = repoData[`issue${idx}`] as {
+timelineItems?: {
+nodes?: Array<{
+source?: { number?: number; title?: string; state?: string; url?: string };
+subject?: { number?: number; title?: string; state?: string; url?: string };
+}>;
+};
+} | null;
if (issueData?.timelineItems?.nodes) {
const linkedPRs: LinkedPullRequest[] = [];
const seenPRs = new Set<number>();
@@ -130,9 +162,9 @@ async function fetchLinkedPRs(
seenPRs.add(pr.number);
linkedPRs.push({
number: pr.number,
-title: pr.title,
-state: pr.state.toLowerCase(),
-url: pr.url,
+title: pr.title || '',
+state: (pr.state || '').toLowerCase(),
+url: pr.url || '',
});
}
}
@@ -143,9 +175,12 @@ async function fetchLinkedPRs(
}
});
}
-} catch {
+} catch (error) {
// If GraphQL fails, continue without linked PRs
-console.warn('Failed to fetch linked PRs via GraphQL');
+console.warn(
+'Failed to fetch linked PRs via GraphQL:',
+error instanceof Error ? error.message : error
+);
}
}
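The essential property of the new code path is that the GraphQL query never passes through a shell: spawn gets its arguments as an array and the query travels over stdin, so quotes or $(...) sequences in issue-derived text are inert, whereas the removed execAsync call interpolated the query into a shell command string. A generic helper in the same spirit, sketched with Node's child_process API (the helper name is illustrative, not something this codebase defines):

import { spawn } from 'child_process';

// Runs a command with argv passed as an array (no shell involved) and feeds
// `input` to the process over stdin, resolving with the captured stdout.
function runWithStdin(cmd: string, args: string[], input: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const child = spawn(cmd, args);
    let stdout = '';
    let stderr = '';
    child.stdout.on('data', (chunk: Buffer) => (stdout += chunk.toString()));
    child.stderr.on('data', (chunk: Buffer) => (stderr += chunk.toString()));
    child.on('error', reject);
    child.on('close', (code) => {
      if (code === 0) {
        resolve(stdout);
      } else {
        reject(new Error(`${cmd} exited with code ${code}: ${stderr}`));
      }
    });
    child.stdin.write(input);
    child.stdin.end();
  });
}

For example, runWithStdin('cat', [], 'hello') resolves to 'hello' without any shell ever seeing the input.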

View File

@@ -159,6 +159,7 @@ async function runValidation(
issueTitle,
result: validationResult,
projectPath,
+model,
};
events.emit('issue-validation:event', completeEvent);
} catch (error) {

View File

@@ -162,14 +162,14 @@ export function GitHubIssuesView() {
return next;
});
-// Update cached validations (use event.issueTitle to avoid stale closure)
+// Update cached validations (use event.model to avoid stale closure race condition)
setCachedValidations((prev) => {
const next = new Map(prev);
next.set(event.issueNumber, {
issueNumber: event.issueNumber,
issueTitle: event.issueTitle,
validatedAt: new Date().toISOString(),
-model: validationModel,
+model: event.model,
result: event.result,
});
return next;
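Reading the model off the event payload sidesteps the stale-closure pitfall: a handler registered once keeps whatever state values existed in the render that created it. A minimal hook-level sketch of the pitfall and the fix, assuming React and an illustrative subscription API (these names are not taken from this component):

import { useEffect, useState } from 'react';

type ValidationCompleteEvent = { issueNumber: number; model: string };

// Stand-in for the real subscription API; returns an unsubscribe function.
declare function onValidationEvent(cb: (e: ValidationCompleteEvent) => void): () => void;

function useValidationCache() {
  const [validationModel] = useState('sonnet');
  const [cache, setCache] = useState(() => new Map<number, string>());

  useEffect(() => {
    // Registered once (empty deps): the callback closes over the first render's
    // `validationModel`, so reading it here can be stale. Reading `event.model`
    // cannot, because the value travels with the event itself.
    return onValidationEvent((event) => {
      setCache((prev) => new Map(prev).set(event.issueNumber, event.model));
    });
  }, []);

  return { validationModel, cache };
}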

View File

@@ -2730,6 +2730,7 @@ function createMockGitHubAPI(): GitHubAPI {
cb({
type: 'issue_validation_complete',
issueNumber: issue.issueNumber,
+issueTitle: issue.issueTitle,
result: {
verdict: 'valid' as const,
confidence: 'medium' as const,
@@ -2739,6 +2740,7 @@ function createMockGitHubAPI(): GitHubAPI {
estimatedComplexity: 'moderate' as const,
},
projectPath,
+model: model || 'sonnet',
})
);
}, 2000);
@@ -2772,6 +2774,12 @@ function createMockGitHubAPI(): GitHubAPI {
validations: [],
};
},
+markValidationViewed: async (projectPath: string, issueNumber: number) => {
+console.log('[Mock] Marking validation as viewed:', { projectPath, issueNumber });
+return {
+success: true,
+};
+},
onValidationEvent: (callback: (event: IssueValidationEvent) => void) => {
mockValidationCallbacks.push(callback);
return () => {

View File

@@ -101,6 +101,8 @@ export type IssueValidationEvent =
issueTitle: string;
result: IssueValidationResult;
projectPath: string;
+/** Model used for validation (opus, sonnet, haiku) */
+model: AgentModel;
}
| {
type: 'issue_validation_error';