From 5d94f1b471ff9e9d3f785626176a0420fadfcb6f Mon Sep 17 00:00:00 2001
From: Ralph Khreish <35776126+Crunchyman-ralph@users.noreply.github.com>
Date: Thu, 14 Aug 2025 00:36:18 +0200
Subject: [PATCH] chore: add a bunch of automations (#1132)
* chore: add a bunch of automations
* chore: run format
* Update .github/scripts/auto-close-duplicates.mjs
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
* chore: run format
---------
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
---
.claude/commands/dedupe.md | 38 +++
.github/scripts/auto-close-duplicates.mjs | 259 ++++++++++++++++++
.../scripts/backfill-duplicate-comments.mjs | 178 ++++++++++++
.github/workflows/auto-close-duplicates.yml | 31 +++
.../workflows/backfill-duplicate-comments.yml | 46 ++++
.github/workflows/claude-dedupe-issues.yml | 81 ++++++
.github/workflows/claude-issue-triage.yml | 107 ++++++++
.github/workflows/claude.yml | 36 +++
.github/workflows/log-issue-events.yml | 176 ++++++++++++
.github/workflows/weekly-metrics-discord.yml | 96 +++++++
10 files changed, 1048 insertions(+)
create mode 100644 .claude/commands/dedupe.md
create mode 100644 .github/scripts/auto-close-duplicates.mjs
create mode 100644 .github/scripts/backfill-duplicate-comments.mjs
create mode 100644 .github/workflows/auto-close-duplicates.yml
create mode 100644 .github/workflows/backfill-duplicate-comments.yml
create mode 100644 .github/workflows/claude-dedupe-issues.yml
create mode 100644 .github/workflows/claude-issue-triage.yml
create mode 100644 .github/workflows/claude.yml
create mode 100644 .github/workflows/log-issue-events.yml
create mode 100644 .github/workflows/weekly-metrics-discord.yml
diff --git a/.claude/commands/dedupe.md b/.claude/commands/dedupe.md
new file mode 100644
index 00000000..121f271c
--- /dev/null
+++ b/.claude/commands/dedupe.md
@@ -0,0 +1,38 @@
+---
+allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
+description: Find duplicate GitHub issues
+---
+
+Find up to 3 likely duplicate issues for a given GitHub issue.
+
+To do this, follow these steps precisely:
+
+1. Use an agent to check whether the GitHub issue (a) is closed, (b) does not need to be deduped (e.g., because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicates comment that you made earlier. If any of these apply, do not proceed.
+2. Use an agent to view the GitHub issue and ask it to return a summary of the issue.
+3. Then, launch 5 parallel agents to search GitHub for duplicates of this issue, using diverse keywords and search approaches along with the summary from step 2.
+4. Next, feed the results from steps 2 and 3 into another agent so that it can filter out false positives that are likely not actual duplicates of the original issue. If no duplicates remain, do not proceed.
+5. Finally, comment back on the issue with a list of up to three likely duplicate issues.
+
+Notes (be sure to tell this to your agents, too):
+
+- Use `gh` to interact with GitHub, rather than web fetch
+- Do not use tools other than `gh` (e.g., do not use other MCP servers, file editing, etc.)
+- Make a todo list first
+- For your comment, follow this format precisely (assuming for this example that you found 3 suspected duplicates):
+
+---
+
+Found 3 possible duplicate issues:
+
+1.
+2.
+3.
+
+This issue will be automatically closed as a duplicate in 3 days.
+
+- If your issue is a duplicate, please close it and 👍 the existing issue instead
+- To prevent auto-closure, add a comment or 👎 this comment
+
+🤖 Generated with [Claude Code](https://claude.ai/code)
+
+---
\ No newline at end of file
diff --git a/.github/scripts/auto-close-duplicates.mjs b/.github/scripts/auto-close-duplicates.mjs
new file mode 100644
index 00000000..cf730a8c
--- /dev/null
+++ b/.github/scripts/auto-close-duplicates.mjs
@@ -0,0 +1,259 @@
+#!/usr/bin/env node
+
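+// Minimal fetch wrapper for the GitHub REST API; throws on any non-2xx response.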
+async function githubRequest(endpoint, token, method = 'GET', body) {
+ const response = await fetch(`https://api.github.com${endpoint}`, {
+ method,
+ headers: {
+ Authorization: `Bearer ${token}`,
+ Accept: 'application/vnd.github.v3+json',
+ 'User-Agent': 'auto-close-duplicates-script',
+ ...(body && { 'Content-Type': 'application/json' })
+ },
+ ...(body && { body: JSON.stringify(body) })
+ });
+
+ if (!response.ok) {
+ throw new Error(
+ `GitHub API request failed: ${response.status} ${response.statusText}`
+ );
+ }
+
+ return response.json();
+}
+
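+// Pull the first issue reference like "#123" out of a duplicate-detection comment body.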
+function extractDuplicateIssueNumber(commentBody) {
+ const match = commentBody.match(/#(\d+)/);
+ return match ? parseInt(match[1], 10) : null;
+}
+
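+// Close the issue as not planned, apply the "duplicate" label, and leave an explanatory comment.
+// Note: PATCHing `labels` replaces any labels already on the issue.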
+async function closeIssueAsDuplicate(
+ owner,
+ repo,
+ issueNumber,
+ duplicateOfNumber,
+ token
+) {
+ await githubRequest(
+ `/repos/${owner}/${repo}/issues/${issueNumber}`,
+ token,
+ 'PATCH',
+ {
+ state: 'closed',
+ state_reason: 'not_planned',
+ labels: ['duplicate']
+ }
+ );
+
+ await githubRequest(
+ `/repos/${owner}/${repo}/issues/${issueNumber}/comments`,
+ token,
+ 'POST',
+ {
+ body: `This issue has been automatically closed as a duplicate of #${duplicateOfNumber}.
+
+If this is incorrect, please re-open this issue or create a new one.
+
+🤖 Generated with [Task Master Bot]`
+ }
+ );
+}
+
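+// Scan open issues that are at least three days old and close the ones whose
+// duplicate-detection comment went unchallenged (no later comments, no 👎 from the author).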
+async function autoCloseDuplicates() {
+ console.log('[DEBUG] Starting auto-close duplicates script');
+
+ const token = process.env.GITHUB_TOKEN;
+ if (!token) {
+ throw new Error('GITHUB_TOKEN environment variable is required');
+ }
+ console.log('[DEBUG] GitHub token found');
+
+ const owner = process.env.GITHUB_REPOSITORY_OWNER || 'eyaltoledano';
+ const repo = process.env.GITHUB_REPOSITORY_NAME || 'claude-task-master';
+ console.log(`[DEBUG] Repository: ${owner}/${repo}`);
+
+ const threeDaysAgo = new Date();
+ threeDaysAgo.setDate(threeDaysAgo.getDate() - 3);
+ console.log(
+ `[DEBUG] Checking for duplicate comments older than: ${threeDaysAgo.toISOString()}`
+ );
+
+ console.log('[DEBUG] Fetching open issues created more than 3 days ago...');
+ const allIssues = [];
+ let page = 1;
+ const perPage = 100;
+
+	const MAX_PAGES = 50; // Safety cap on pagination for large repos
+
+	while (true) {
+		const pageIssues = await githubRequest(
+			`/repos/${owner}/${repo}/issues?state=open&per_page=${perPage}&page=${page}&sort=created&direction=asc`,
+			token
+		);
+
+		if (pageIssues.length === 0) break;
+
+		// Keep only issues created more than 3 days ago
+		const oldEnoughIssues = pageIssues.filter(
+			(issue) => new Date(issue.created_at) <= threeDaysAgo
+		);
+
+		allIssues.push(...oldEnoughIssues);
+
+		// Issues are sorted oldest-first, so once a page contains an issue newer
+		// than the cutoff, every remaining issue is newer too and we can stop.
+		if (oldEnoughIssues.length < pageIssues.length) break;
+
+		page++;
+
+		// Safety limit to avoid infinite loops
+		if (page > MAX_PAGES) {
+			console.log(`[WARNING] Reached maximum page limit of ${MAX_PAGES}`);
+			break;
+		}
+	}
+
+ const issues = allIssues;
+	console.log(`[DEBUG] Found ${issues.length} open issues at least 3 days old`);
+
+ let processedCount = 0;
+ let candidateCount = 0;
+
+ for (const issue of issues) {
+ processedCount++;
+ console.log(
+ `[DEBUG] Processing issue #${issue.number} (${processedCount}/${issues.length}): ${issue.title}`
+ );
+
+ console.log(`[DEBUG] Fetching comments for issue #${issue.number}...`);
+ const comments = await githubRequest(
+ `/repos/${owner}/${repo}/issues/${issue.number}/comments`,
+ token
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} has ${comments.length} comments`
+ );
+
+ const dupeComments = comments.filter(
+ (comment) =>
+ comment.body.includes('Found') &&
+ comment.body.includes('possible duplicate') &&
+ comment.user.type === 'Bot'
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} has ${dupeComments.length} duplicate detection comments`
+ );
+
+ if (dupeComments.length === 0) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} - no duplicate comments found, skipping`
+ );
+ continue;
+ }
+
+ const lastDupeComment = dupeComments[dupeComments.length - 1];
+ const dupeCommentDate = new Date(lastDupeComment.created_at);
+ console.log(
+ `[DEBUG] Issue #${
+ issue.number
+ } - most recent duplicate comment from: ${dupeCommentDate.toISOString()}`
+ );
+
+ if (dupeCommentDate > threeDaysAgo) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} - duplicate comment is too recent, skipping`
+ );
+ continue;
+ }
+ console.log(
+ `[DEBUG] Issue #${
+ issue.number
+ } - duplicate comment is old enough (${Math.floor(
+ (Date.now() - dupeCommentDate.getTime()) / (1000 * 60 * 60 * 24)
+ )} days)`
+ );
+
+ const commentsAfterDupe = comments.filter(
+ (comment) => new Date(comment.created_at) > dupeCommentDate
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} - ${commentsAfterDupe.length} comments after duplicate detection`
+ );
+
+ if (commentsAfterDupe.length > 0) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} - has activity after duplicate comment, skipping`
+ );
+ continue;
+ }
+
+ console.log(
+ `[DEBUG] Issue #${issue.number} - checking reactions on duplicate comment...`
+ );
+ const reactions = await githubRequest(
+ `/repos/${owner}/${repo}/issues/comments/${lastDupeComment.id}/reactions`,
+ token
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} - duplicate comment has ${reactions.length} reactions`
+ );
+
+ const authorThumbsDown = reactions.some(
+ (reaction) =>
+ reaction.user.id === issue.user.id && reaction.content === '-1'
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} - author thumbs down reaction: ${authorThumbsDown}`
+ );
+
+ if (authorThumbsDown) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} - author disagreed with duplicate detection, skipping`
+ );
+ continue;
+ }
+
+ const duplicateIssueNumber = extractDuplicateIssueNumber(
+ lastDupeComment.body
+ );
+ if (!duplicateIssueNumber) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} - could not extract duplicate issue number from comment, skipping`
+ );
+ continue;
+ }
+
+ candidateCount++;
+ const issueUrl = `https://github.com/${owner}/${repo}/issues/${issue.number}`;
+
+ try {
+ console.log(
+ `[INFO] Auto-closing issue #${issue.number} as duplicate of #${duplicateIssueNumber}: ${issueUrl}`
+ );
+ await closeIssueAsDuplicate(
+ owner,
+ repo,
+ issue.number,
+ duplicateIssueNumber,
+ token
+ );
+ console.log(
+ `[SUCCESS] Successfully closed issue #${issue.number} as duplicate of #${duplicateIssueNumber}`
+ );
+ } catch (error) {
+ console.error(
+ `[ERROR] Failed to close issue #${issue.number} as duplicate: ${error}`
+ );
+ }
+ }
+
+ console.log(
+ `[DEBUG] Script completed. Processed ${processedCount} issues, found ${candidateCount} candidates for auto-close`
+ );
+}
+
+autoCloseDuplicates().catch(console.error);
diff --git a/.github/scripts/backfill-duplicate-comments.mjs b/.github/scripts/backfill-duplicate-comments.mjs
new file mode 100644
index 00000000..2039b7ad
--- /dev/null
+++ b/.github/scripts/backfill-duplicate-comments.mjs
@@ -0,0 +1,178 @@
+#!/usr/bin/env node
+
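+// Same minimal GitHub REST helper as in auto-close-duplicates.mjs; throws on any non-2xx response.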
+async function githubRequest(endpoint, token, method = 'GET', body) {
+ const response = await fetch(`https://api.github.com${endpoint}`, {
+ method,
+ headers: {
+ Authorization: `Bearer ${token}`,
+ Accept: 'application/vnd.github.v3+json',
+ 'User-Agent': 'backfill-duplicate-comments-script',
+ ...(body && { 'Content-Type': 'application/json' })
+ },
+ ...(body && { body: JSON.stringify(body) })
+ });
+
+ if (!response.ok) {
+ throw new Error(
+ `GitHub API request failed: ${response.status} ${response.statusText}`
+ );
+ }
+
+ return response.json();
+}
+
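+// Dispatch the claude-dedupe-issues.yml workflow for a single issue (no-op when dryRun is true).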
+async function triggerDedupeWorkflow(
+ owner,
+ repo,
+ issueNumber,
+ token,
+ dryRun = true
+) {
+ if (dryRun) {
+ console.log(
+ `[DRY RUN] Would trigger dedupe workflow for issue #${issueNumber}`
+ );
+ return;
+ }
+
+ await githubRequest(
+ `/repos/${owner}/${repo}/actions/workflows/claude-dedupe-issues.yml/dispatches`,
+ token,
+ 'POST',
+ {
+ ref: 'main',
+ inputs: {
+ issue_number: issueNumber.toString()
+ }
+ }
+ );
+}
+
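+// Find recent issues that never received a duplicate-detection comment and
+// trigger the dedupe workflow for each one.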
+async function backfillDuplicateComments() {
+ console.log('[DEBUG] Starting backfill duplicate comments script');
+
+ const token = process.env.GITHUB_TOKEN;
+ if (!token) {
+ throw new Error(`GITHUB_TOKEN environment variable is required
+
+Usage:
+ node .github/scripts/backfill-duplicate-comments.mjs
+
+Environment Variables:
+ GITHUB_TOKEN - GitHub personal access token with repo and actions permissions (required)
+ DRY_RUN - Set to "false" to actually trigger workflows (default: true for safety)
+ DAYS_BACK - How many days back to look for old issues (default: 90)`);
+ }
+ console.log('[DEBUG] GitHub token found');
+
+ const owner = process.env.GITHUB_REPOSITORY_OWNER || 'eyaltoledano';
+ const repo = process.env.GITHUB_REPOSITORY_NAME || 'claude-task-master';
+ const dryRun = process.env.DRY_RUN !== 'false';
+ const daysBack = parseInt(process.env.DAYS_BACK || '90', 10);
+
+ console.log(`[DEBUG] Repository: ${owner}/${repo}`);
+ console.log(`[DEBUG] Dry run mode: ${dryRun}`);
+ console.log(`[DEBUG] Looking back ${daysBack} days`);
+
+ const cutoffDate = new Date();
+ cutoffDate.setDate(cutoffDate.getDate() - daysBack);
+
+ console.log(
+		`[DEBUG] Fetching issues updated since ${cutoffDate.toISOString()} (the issues API filters by update time)...`
+ );
+ const allIssues = [];
+ let page = 1;
+ const perPage = 100;
+
+ while (true) {
+ const pageIssues = await githubRequest(
+ `/repos/${owner}/${repo}/issues?state=all&per_page=${perPage}&page=${page}&since=${cutoffDate.toISOString()}`,
+ token
+ );
+
+ if (pageIssues.length === 0) break;
+
+ allIssues.push(...pageIssues);
+ page++;
+
+ // Safety limit to avoid infinite loops
+ if (page > 100) {
+ console.log('[DEBUG] Reached page limit, stopping pagination');
+ break;
+ }
+ }
+
+ console.log(
+		`[DEBUG] Found ${allIssues.length} issues updated in the last ${daysBack} days`
+ );
+
+ let processedCount = 0;
+ let candidateCount = 0;
+ let triggeredCount = 0;
+
+ for (const issue of allIssues) {
+ processedCount++;
+ console.log(
+ `[DEBUG] Processing issue #${issue.number} (${processedCount}/${allIssues.length}): ${issue.title}`
+ );
+
+ console.log(`[DEBUG] Fetching comments for issue #${issue.number}...`);
+ const comments = await githubRequest(
+ `/repos/${owner}/${repo}/issues/${issue.number}/comments`,
+ token
+ );
+ console.log(
+ `[DEBUG] Issue #${issue.number} has ${comments.length} comments`
+ );
+
+ // Look for existing duplicate detection comments (from the dedupe bot)
+ const dupeDetectionComments = comments.filter(
+ (comment) =>
+ comment.body.includes('Found') &&
+ comment.body.includes('possible duplicate') &&
+ comment.user.type === 'Bot'
+ );
+
+ console.log(
+ `[DEBUG] Issue #${issue.number} has ${dupeDetectionComments.length} duplicate detection comments`
+ );
+
+ // Skip if there's already a duplicate detection comment
+ if (dupeDetectionComments.length > 0) {
+ console.log(
+ `[DEBUG] Issue #${issue.number} already has duplicate detection comment, skipping`
+ );
+ continue;
+ }
+
+ candidateCount++;
+ const issueUrl = `https://github.com/${owner}/${repo}/issues/${issue.number}`;
+
+ try {
+ console.log(
+ `[INFO] ${dryRun ? '[DRY RUN] ' : ''}Triggering dedupe workflow for issue #${issue.number}: ${issueUrl}`
+ );
+ await triggerDedupeWorkflow(owner, repo, issue.number, token, dryRun);
+
+ if (!dryRun) {
+ console.log(
+ `[SUCCESS] Successfully triggered dedupe workflow for issue #${issue.number}`
+ );
+ }
+ triggeredCount++;
+ } catch (error) {
+ console.error(
+ `[ERROR] Failed to trigger workflow for issue #${issue.number}: ${error}`
+ );
+ }
+
+ // Add a delay between workflow triggers to avoid overwhelming the system
+ await new Promise((resolve) => setTimeout(resolve, 1000));
+ }
+
+ console.log(
+ `[DEBUG] Script completed. Processed ${processedCount} issues, found ${candidateCount} candidates without duplicate comments, ${dryRun ? 'would trigger' : 'triggered'} ${triggeredCount} workflows`
+ );
+}
+
+backfillDuplicateComments().catch(console.error);
diff --git a/.github/workflows/auto-close-duplicates.yml b/.github/workflows/auto-close-duplicates.yml
new file mode 100644
index 00000000..d26edc4d
--- /dev/null
+++ b/.github/workflows/auto-close-duplicates.yml
@@ -0,0 +1,31 @@
+name: Auto-close duplicate issues
+# description: Auto-closes issues that are duplicates of existing issues
+
+on:
+ schedule:
+ - cron: "0 9 * * *" # Runs daily at 9 AM UTC
+ workflow_dispatch:
+
+jobs:
+ auto-close-duplicates:
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ permissions:
+ contents: read
+ issues: write # Need write permission to close issues and add comments
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+
+ - name: Auto-close duplicate issues
+ run: node .github/scripts/auto-close-duplicates.mjs
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_REPOSITORY_OWNER: ${{ github.repository_owner }}
+ GITHUB_REPOSITORY_NAME: ${{ github.event.repository.name }}
diff --git a/.github/workflows/backfill-duplicate-comments.yml b/.github/workflows/backfill-duplicate-comments.yml
new file mode 100644
index 00000000..fa58ff24
--- /dev/null
+++ b/.github/workflows/backfill-duplicate-comments.yml
@@ -0,0 +1,46 @@
+name: Backfill Duplicate Comments
+# description: Triggers duplicate detection for old issues that don't have duplicate comments
+
+on:
+ workflow_dispatch:
+ inputs:
+ days_back:
+ description: "How many days back to look for old issues"
+ required: false
+ default: "90"
+ type: string
+ dry_run:
+ description: "Dry run mode (true to only log what would be done)"
+ required: false
+ default: "true"
+ type: choice
+ options:
+ - "true"
+ - "false"
+
+jobs:
+ backfill-duplicate-comments:
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ permissions:
+ contents: read
+ issues: read
+ actions: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+
+ - name: Backfill duplicate comments
+ run: node .github/scripts/backfill-duplicate-comments.mjs
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_REPOSITORY_OWNER: ${{ github.repository_owner }}
+ GITHUB_REPOSITORY_NAME: ${{ github.event.repository.name }}
+ DAYS_BACK: ${{ inputs.days_back }}
+ DRY_RUN: ${{ inputs.dry_run }}
diff --git a/.github/workflows/claude-dedupe-issues.yml b/.github/workflows/claude-dedupe-issues.yml
new file mode 100644
index 00000000..41d50c3f
--- /dev/null
+++ b/.github/workflows/claude-dedupe-issues.yml
@@ -0,0 +1,81 @@
+name: Claude Issue Dedupe
+# description: Automatically dedupe GitHub issues using Claude Code
+
+on:
+ issues:
+ types: [opened]
+ workflow_dispatch:
+ inputs:
+ issue_number:
+ description: "Issue number to process for duplicate detection"
+ required: true
+ type: string
+
+jobs:
+ claude-dedupe-issues:
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ permissions:
+ contents: read
+ issues: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Run Claude Code slash command
+ uses: anthropics/claude-code-base-action@beta
+ with:
+ prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
+ anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
+ claude_env: |
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Log duplicate comment event to Statsig
+ if: always()
+ env:
+ STATSIG_API_KEY: ${{ secrets.STATSIG_API_KEY }}
+ run: |
+ ISSUE_NUMBER=${{ github.event.issue.number || inputs.issue_number }}
+ REPO=${{ github.repository }}
+
+ if [ -z "$STATSIG_API_KEY" ]; then
+ echo "STATSIG_API_KEY not found, skipping Statsig logging"
+ exit 0
+ fi
+
+ # Prepare the event payload
+ EVENT_PAYLOAD=$(jq -n \
+ --arg issue_number "$ISSUE_NUMBER" \
+ --arg repo "$REPO" \
+ --arg triggered_by "${{ github.event_name }}" \
+ '{
+ events: [{
+ eventName: "github_duplicate_comment_added",
+ value: 1,
+ metadata: {
+ repository: $repo,
+ issue_number: ($issue_number | tonumber),
+ triggered_by: $triggered_by,
+ workflow_run_id: "${{ github.run_id }}"
+ },
+ time: (now | floor | tostring)
+ }]
+ }')
+
+ # Send to Statsig API
+ echo "Logging duplicate comment event to Statsig for issue #${ISSUE_NUMBER}"
+
+ RESPONSE=$(curl -s -w "\n%{http_code}" -X POST https://events.statsigapi.net/v1/log_event \
+ -H "Content-Type: application/json" \
+ -H "STATSIG-API-KEY: ${STATSIG_API_KEY}" \
+ -d "$EVENT_PAYLOAD")
+
+ HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
+ BODY=$(echo "$RESPONSE" | head -n-1)
+
+ if [ "$HTTP_CODE" -eq 200 ] || [ "$HTTP_CODE" -eq 202 ]; then
+ echo "Successfully logged duplicate comment event for issue #${ISSUE_NUMBER}"
+ else
+ echo "Failed to log duplicate comment event for issue #${ISSUE_NUMBER}. HTTP ${HTTP_CODE}: ${BODY}"
+ fi
diff --git a/.github/workflows/claude-issue-triage.yml b/.github/workflows/claude-issue-triage.yml
new file mode 100644
index 00000000..606ac1d3
--- /dev/null
+++ b/.github/workflows/claude-issue-triage.yml
@@ -0,0 +1,107 @@
+name: Claude Issue Triage
+# description: Automatically triage GitHub issues using Claude Code
+
+on:
+ issues:
+ types: [opened]
+
+jobs:
+ triage-issue:
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ permissions:
+ contents: read
+ issues: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Create triage prompt
+ run: |
+ mkdir -p /tmp/claude-prompts
+ cat > /tmp/claude-prompts/triage-prompt.txt << 'EOF'
+ You're an issue triage assistant for GitHub issues. Your task is to analyze the issue and select appropriate labels from the provided list.
+
+ IMPORTANT: Don't post any comments or messages to the issue. Your only action should be to apply labels.
+
+ Issue Information:
+ - REPO: ${{ github.repository }}
+ - ISSUE_NUMBER: ${{ github.event.issue.number }}
+
+ TASK OVERVIEW:
+
+ 1. First, fetch the list of labels available in this repository by running: `gh label list`. Run exactly this command with nothing else.
+
+ 2. Next, use the GitHub tools to get context about the issue:
+ - You have access to these tools:
+ - mcp__github__get_issue: Use this to retrieve the current issue's details including title, description, and existing labels
+ - mcp__github__get_issue_comments: Use this to read any discussion or additional context provided in the comments
+ - mcp__github__update_issue: Use this to apply labels to the issue (do not use this for commenting)
+ - mcp__github__search_issues: Use this to find similar issues that might provide context for proper categorization and to identify potential duplicate issues
+ - mcp__github__list_issues: Use this to understand patterns in how other issues are labeled
+ - Start by using mcp__github__get_issue to get the issue details
+
+ 3. Analyze the issue content, considering:
+ - The issue title and description
+ - The type of issue (bug report, feature request, question, etc.)
+ - Technical areas mentioned
+ - Severity or priority indicators
+ - User impact
+ - Components affected
+
+          4. Select appropriate labels from the list you fetched in step 1:
+ - Choose labels that accurately reflect the issue's nature
+ - Be specific but comprehensive
+ - Select priority labels if you can determine urgency (high-priority, med-priority, or low-priority)
+ - Consider platform labels (android, ios) if applicable
+ - If you find similar issues using mcp__github__search_issues, consider using a "duplicate" label if appropriate. Only do so if the issue is a duplicate of another OPEN issue.
+
+ 5. Apply the selected labels:
+ - Use mcp__github__update_issue to apply your selected labels
+ - DO NOT post any comments explaining your decision
+ - DO NOT communicate directly with users
+ - If no labels are clearly applicable, do not apply any labels
+
+ IMPORTANT GUIDELINES:
+ - Be thorough in your analysis
+          - Only select labels that exist in the repository (from the `gh label list` output)
+ - DO NOT post any comments to the issue
+ - Your ONLY action should be to apply labels using mcp__github__update_issue
+ - It's okay to not add any labels if none are clearly applicable
+ EOF
+
+ - name: Setup GitHub MCP Server
+ run: |
+ mkdir -p /tmp/mcp-config
+ cat > /tmp/mcp-config/mcp-servers.json << 'EOF'
+ {
+ "mcpServers": {
+ "github": {
+ "command": "docker",
+ "args": [
+ "run",
+ "-i",
+ "--rm",
+ "-e",
+ "GITHUB_PERSONAL_ACCESS_TOKEN",
+ "ghcr.io/github/github-mcp-server:sha-7aced2b"
+ ],
+ "env": {
+ "GITHUB_PERSONAL_ACCESS_TOKEN": "${{ secrets.GITHUB_TOKEN }}"
+ }
+ }
+ }
+ }
+ EOF
+
+ - name: Run Claude Code for Issue Triage
+ uses: anthropics/claude-code-base-action@beta
+ with:
+ prompt_file: /tmp/claude-prompts/triage-prompt.txt
+ allowed_tools: "Bash(gh label list),mcp__github__get_issue,mcp__github__get_issue_comments,mcp__github__update_issue,mcp__github__search_issues,mcp__github__list_issues"
+ timeout_minutes: "5"
+ anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
+ mcp_config: /tmp/mcp-config/mcp-servers.json
+ claude_env: |
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml
new file mode 100644
index 00000000..a18aa959
--- /dev/null
+++ b/.github/workflows/claude.yml
@@ -0,0 +1,36 @@
+name: Claude Code
+
+on:
+ issue_comment:
+ types: [created]
+ pull_request_review_comment:
+ types: [created]
+ issues:
+ types: [opened, assigned]
+ pull_request_review:
+ types: [submitted]
+
+jobs:
+ claude:
+ if: |
+ (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
+ (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+ id-token: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+ with:
+ fetch-depth: 1
+
+ - name: Run Claude Code
+ id: claude
+ uses: anthropics/claude-code-action@beta
+ with:
+ anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
diff --git a/.github/workflows/log-issue-events.yml b/.github/workflows/log-issue-events.yml
new file mode 100644
index 00000000..99c0a9ea
--- /dev/null
+++ b/.github/workflows/log-issue-events.yml
@@ -0,0 +1,176 @@
+name: Log GitHub Issue Events
+
+on:
+ issues:
+ types: [opened, closed]
+
+jobs:
+ log-issue-created:
+ if: github.event.action == 'opened'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ permissions:
+ contents: read
+ issues: read
+
+ steps:
+ - name: Log issue creation to Statsig
+        env:
+          STATSIG_API_KEY: ${{ secrets.STATSIG_API_KEY }}
+          # Pass the title through the environment so untrusted text is never
+          # interpolated directly into the shell command.
+          ISSUE_TITLE: ${{ github.event.issue.title }}
+        run: |
+          ISSUE_NUMBER=${{ github.event.issue.number }}
+          REPO=${{ github.repository }}
+ AUTHOR="${{ github.event.issue.user.login }}"
+ CREATED_AT="${{ github.event.issue.created_at }}"
+
+ if [ -z "$STATSIG_API_KEY" ]; then
+ echo "STATSIG_API_KEY not found, skipping Statsig logging"
+ exit 0
+ fi
+
+ # Prepare the event payload
+ EVENT_PAYLOAD=$(jq -n \
+ --arg issue_number "$ISSUE_NUMBER" \
+ --arg repo "$REPO" \
+ --arg title "$ISSUE_TITLE" \
+ --arg author "$AUTHOR" \
+ --arg created_at "$CREATED_AT" \
+ '{
+ events: [{
+ eventName: "github_issue_created",
+ value: 1,
+ metadata: {
+ repository: $repo,
+ issue_number: ($issue_number | tonumber),
+ issue_title: $title,
+ issue_author: $author,
+ created_at: $created_at
+ },
+ time: (now | floor | tostring)
+ }]
+ }')
+
+ # Send to Statsig API
+ echo "Logging issue creation to Statsig for issue #${ISSUE_NUMBER}"
+
+ RESPONSE=$(curl -s -w "\n%{http_code}" -X POST https://events.statsigapi.net/v1/log_event \
+ -H "Content-Type: application/json" \
+ -H "STATSIG-API-KEY: ${STATSIG_API_KEY}" \
+ -d "$EVENT_PAYLOAD")
+
+ HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
+ BODY=$(echo "$RESPONSE" | head -n-1)
+
+ if [ "$HTTP_CODE" -eq 200 ] || [ "$HTTP_CODE" -eq 202 ]; then
+ echo "Successfully logged issue creation for issue #${ISSUE_NUMBER}"
+ else
+ echo "Failed to log issue creation for issue #${ISSUE_NUMBER}. HTTP ${HTTP_CODE}: ${BODY}"
+ fi
+
+ log-issue-closed:
+ if: github.event.action == 'closed'
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ permissions:
+ contents: read
+ issues: read
+
+ steps:
+ - name: Log issue closure to Statsig
+        env:
+          STATSIG_API_KEY: ${{ secrets.STATSIG_API_KEY }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          # Pass the title through the environment so untrusted text is never
+          # interpolated directly into the shell command.
+          ISSUE_TITLE: ${{ github.event.issue.title }}
+        run: |
+          ISSUE_NUMBER=${{ github.event.issue.number }}
+          REPO=${{ github.repository }}
+ CLOSED_BY="${{ github.event.issue.closed_by.login }}"
+ CLOSED_AT="${{ github.event.issue.closed_at }}"
+ STATE_REASON="${{ github.event.issue.state_reason }}"
+
+ if [ -z "$STATSIG_API_KEY" ]; then
+ echo "STATSIG_API_KEY not found, skipping Statsig logging"
+ exit 0
+ fi
+
+ # Get additional issue data via GitHub API
+ echo "Fetching additional issue data for #${ISSUE_NUMBER}"
+ ISSUE_DATA=$(curl -s -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/${REPO}/issues/${ISSUE_NUMBER}")
+
+ COMMENTS_COUNT=$(echo "$ISSUE_DATA" | jq -r '.comments')
+
+ # Get reactions data
+ REACTIONS_DATA=$(curl -s -H "Authorization: token ${GITHUB_TOKEN}" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/${REPO}/issues/${ISSUE_NUMBER}/reactions")
+
+ REACTIONS_COUNT=$(echo "$REACTIONS_DATA" | jq '. | length')
+
+ # Check if issue was closed automatically (by checking if closed_by is a bot)
+ CLOSED_AUTOMATICALLY="false"
+ if [[ "$CLOSED_BY" == *"[bot]"* ]]; then
+ CLOSED_AUTOMATICALLY="true"
+ fi
+
+ # Check if closed as duplicate by state_reason
+ CLOSED_AS_DUPLICATE="false"
+ if [ "$STATE_REASON" = "duplicate" ]; then
+ CLOSED_AS_DUPLICATE="true"
+ fi
+
+ # Prepare the event payload
+ EVENT_PAYLOAD=$(jq -n \
+ --arg issue_number "$ISSUE_NUMBER" \
+ --arg repo "$REPO" \
+ --arg title "$ISSUE_TITLE" \
+ --arg closed_by "$CLOSED_BY" \
+ --arg closed_at "$CLOSED_AT" \
+ --arg state_reason "$STATE_REASON" \
+ --arg comments_count "$COMMENTS_COUNT" \
+ --arg reactions_count "$REACTIONS_COUNT" \
+ --arg closed_automatically "$CLOSED_AUTOMATICALLY" \
+ --arg closed_as_duplicate "$CLOSED_AS_DUPLICATE" \
+ '{
+ events: [{
+ eventName: "github_issue_closed",
+ value: 1,
+ metadata: {
+ repository: $repo,
+ issue_number: ($issue_number | tonumber),
+ issue_title: $title,
+ closed_by: $closed_by,
+ closed_at: $closed_at,
+ state_reason: $state_reason,
+ comments_count: ($comments_count | tonumber),
+ reactions_count: ($reactions_count | tonumber),
+ closed_automatically: ($closed_automatically | test("true")),
+ closed_as_duplicate: ($closed_as_duplicate | test("true"))
+ },
+ time: (now | floor | tostring)
+ }]
+ }')
+
+ # Send to Statsig API
+ echo "Logging issue closure to Statsig for issue #${ISSUE_NUMBER}"
+
+ RESPONSE=$(curl -s -w "\n%{http_code}" -X POST https://events.statsigapi.net/v1/log_event \
+ -H "Content-Type: application/json" \
+ -H "STATSIG-API-KEY: ${STATSIG_API_KEY}" \
+ -d "$EVENT_PAYLOAD")
+
+ HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
+ BODY=$(echo "$RESPONSE" | head -n-1)
+
+ if [ "$HTTP_CODE" -eq 200 ] || [ "$HTTP_CODE" -eq 202 ]; then
+ echo "Successfully logged issue closure for issue #${ISSUE_NUMBER}"
+ echo "Closed by: $CLOSED_BY"
+ echo "Comments: $COMMENTS_COUNT"
+ echo "Reactions: $REACTIONS_COUNT"
+ echo "Closed automatically: $CLOSED_AUTOMATICALLY"
+ echo "Closed as duplicate: $CLOSED_AS_DUPLICATE"
+ else
+ echo "Failed to log issue closure for issue #${ISSUE_NUMBER}. HTTP ${HTTP_CODE}: ${BODY}"
+ fi
diff --git a/.github/workflows/weekly-metrics-discord.yml b/.github/workflows/weekly-metrics-discord.yml
new file mode 100644
index 00000000..8a638b36
--- /dev/null
+++ b/.github/workflows/weekly-metrics-discord.yml
@@ -0,0 +1,96 @@
+name: Weekly Metrics to Discord
+# description: Sends weekly metrics summary to Discord channel
+
+on:
+ schedule:
+    - cron: "0 9 * * 1" # Every Monday at 9 AM UTC
+ workflow_dispatch:
+
+permissions:
+ contents: read
+ issues: write
+ pull-requests: read
+
+jobs:
+ weekly-metrics:
+ runs-on: ubuntu-latest
+ env:
+ DISCORD_WEBHOOK: ${{ secrets.DISCORD_METRICS_WEBHOOK }}
+ steps:
+ - name: Get dates for last week
+ run: |
+ # Last 7 days
+ first_day=$(date -d "7 days ago" +%Y-%m-%d)
+ last_day=$(date +%Y-%m-%d)
+
+ echo "first_day=$first_day" >> $GITHUB_ENV
+ echo "last_day=$last_day" >> $GITHUB_ENV
+ echo "week_of=$(date -d '7 days ago' +'Week of %B %d, %Y')" >> $GITHUB_ENV
+
+ - name: Generate issue metrics
+ uses: github/issue-metrics@v3
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ SEARCH_QUERY: "repo:${{ github.repository }} is:issue created:${{ env.first_day }}..${{ env.last_day }}"
+ HIDE_TIME_TO_ANSWER: true
+ HIDE_LABEL_METRICS: false
+
+ - name: Generate PR metrics
+ uses: github/issue-metrics@v3
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ SEARCH_QUERY: "repo:${{ github.repository }} is:pr created:${{ env.first_day }}..${{ env.last_day }}"
+ OUTPUT_FILE: pr_metrics.md
+
+ - name: Parse metrics
+ id: metrics
+ run: |
+ # Parse the metrics from the generated markdown files
+ if [ -f "issue_metrics.md" ]; then
+ # Extract key metrics using grep/awk
+ AVG_TIME_TO_FIRST_RESPONSE=$(grep -A 1 "Average time to first response" issue_metrics.md | tail -1 | xargs || echo "N/A")
+ AVG_TIME_TO_CLOSE=$(grep -A 1 "Average time to close" issue_metrics.md | tail -1 | xargs || echo "N/A")
+ NUM_ISSUES_CREATED=$(grep -oP '\d+(?= issues created)' issue_metrics.md || echo "0")
+ NUM_ISSUES_CLOSED=$(grep -oP '\d+(?= issues closed)' issue_metrics.md || echo "0")
+ fi
+
+ if [ -f "pr_metrics.md" ]; then
+ PR_AVG_TIME_TO_MERGE=$(grep -A 1 "Average time to close" pr_metrics.md | tail -1 | xargs || echo "N/A")
+ NUM_PRS_CREATED=$(grep -oP '\d+(?= pull requests created)' pr_metrics.md || echo "0")
+ NUM_PRS_MERGED=$(grep -oP '\d+(?= pull requests closed)' pr_metrics.md || echo "0")
+ fi
+
+ # Set outputs for Discord action
+ echo "issues_created=${NUM_ISSUES_CREATED:-0}" >> $GITHUB_OUTPUT
+ echo "issues_closed=${NUM_ISSUES_CLOSED:-0}" >> $GITHUB_OUTPUT
+ echo "prs_created=${NUM_PRS_CREATED:-0}" >> $GITHUB_OUTPUT
+ echo "prs_merged=${NUM_PRS_MERGED:-0}" >> $GITHUB_OUTPUT
+ echo "avg_first_response=${AVG_TIME_TO_FIRST_RESPONSE:-N/A}" >> $GITHUB_OUTPUT
+ echo "avg_time_to_close=${AVG_TIME_TO_CLOSE:-N/A}" >> $GITHUB_OUTPUT
+ echo "pr_avg_merge_time=${PR_AVG_TIME_TO_MERGE:-N/A}" >> $GITHUB_OUTPUT
+
+ - name: Send to Discord
+ uses: sarisia/actions-status-discord@v1
+ if: env.DISCORD_WEBHOOK != ''
+ with:
+ webhook: ${{ env.DISCORD_WEBHOOK }}
+ status: Success
+ title: "📊 Weekly Metrics Report"
+ description: |
+ **${{ env.week_of }}**
+
+ **🎯 Issues**
+ • Created: ${{ steps.metrics.outputs.issues_created }}
+ • Closed: ${{ steps.metrics.outputs.issues_closed }}
+
+ **🔀 Pull Requests**
+ • Created: ${{ steps.metrics.outputs.prs_created }}
+ • Merged: ${{ steps.metrics.outputs.prs_merged }}
+
+ **⏱️ Response Times**
+ • First Response: ${{ steps.metrics.outputs.avg_first_response }}
+ • Time to Close: ${{ steps.metrics.outputs.avg_time_to_close }}
+ • PR Merge Time: ${{ steps.metrics.outputs.pr_avg_merge_time }}
+ color: 0x58AFFF
+ username: Task Master Metrics Bot
+ avatar_url: https://raw.githubusercontent.com/eyaltoledano/claude-task-master/main/images/logo.png