feat: update dedupe backfill script to filter by issue number

Replace date-based filtering with issue number filtering to only process issues older than #4050. This provides more precise control over which issues are processed for duplicate detection backfill.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: Boris Cherny
Date: 2025-08-15 16:26:38 -07:00
Parent: fc8c10995f
Commit: 20ba9a34a5


@@ -82,46 +82,61 @@ Usage:
 Environment Variables:
   GITHUB_TOKEN - GitHub personal access token with repo and actions permissions (required)
   DRY_RUN - Set to "false" to actually trigger workflows (default: true for safety)
-  DAYS_BACK - How many days back to look for old issues (default: 90)`);
+  MAX_ISSUE_NUMBER - Only process issues with numbers less than this value (default: 4050)`);
 }
 console.log("[DEBUG] GitHub token found");
 const owner = "anthropics";
 const repo = "claude-code";
 const dryRun = process.env.DRY_RUN !== "false";
-const daysBack = parseInt(process.env.DAYS_BACK || "90", 10);
+const maxIssueNumber = parseInt(process.env.MAX_ISSUE_NUMBER || "4050", 10);
+const minIssueNumber = parseInt(process.env.MIN_ISSUE_NUMBER || "1", 10);
 console.log(`[DEBUG] Repository: ${owner}/${repo}`);
 console.log(`[DEBUG] Dry run mode: ${dryRun}`);
-console.log(`[DEBUG] Looking back ${daysBack} days`);
+console.log(`[DEBUG] Looking at issues between #${minIssueNumber} and #${maxIssueNumber}`);
-const cutoffDate = new Date();
-cutoffDate.setDate(cutoffDate.getDate() - daysBack);
-console.log(`[DEBUG] Fetching issues created since ${cutoffDate.toISOString()}...`);
+console.log(`[DEBUG] Fetching issues between #${minIssueNumber} and #${maxIssueNumber}...`);
 const allIssues: GitHubIssue[] = [];
 let page = 1;
 const perPage = 100;
 while (true) {
   const pageIssues: GitHubIssue[] = await githubRequest(
-    `/repos/${owner}/${repo}/issues?state=all&per_page=${perPage}&page=${page}&since=${cutoffDate.toISOString()}`,
+    `/repos/${owner}/${repo}/issues?state=all&per_page=${perPage}&page=${page}&sort=created&direction=desc`,
     token
   );
   if (pageIssues.length === 0) break;
-  allIssues.push(...pageIssues);
+  // Filter to only include issues within the specified range
+  const filteredIssues = pageIssues.filter(issue =>
+    issue.number >= minIssueNumber && issue.number < maxIssueNumber
+  );
+  allIssues.push(...filteredIssues);
+  // If the oldest issue in this page is still at or above our maximum, we need to continue,
+  // but if the oldest issue is below our minimum, we can stop
+  const oldestIssueInPage = pageIssues[pageIssues.length - 1];
+  if (oldestIssueInPage && oldestIssueInPage.number >= maxIssueNumber) {
+    console.log(`[DEBUG] Oldest issue in page #${page} is #${oldestIssueInPage.number}, continuing...`);
+  } else if (oldestIssueInPage && oldestIssueInPage.number < minIssueNumber) {
+    console.log(`[DEBUG] Oldest issue in page #${page} is #${oldestIssueInPage.number}, below minimum, stopping`);
+    break;
+  } else if (filteredIssues.length === 0 && pageIssues.length > 0) {
+    console.log(`[DEBUG] No issues in page #${page} are in range #${minIssueNumber}-#${maxIssueNumber}, continuing...`);
+  }
   page++;
   // Safety limit to avoid infinite loops
-  if (page > 100) {
+  if (page > 200) {
     console.log("[DEBUG] Reached page limit, stopping pagination");
     break;
   }
 }
-console.log(`[DEBUG] Found ${allIssues.length} issues from the last ${daysBack} days`);
+console.log(`[DEBUG] Found ${allIssues.length} issues between #${minIssueNumber} and #${maxIssueNumber}`);
 let processedCount = 0;
 let candidateCount = 0;
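
For illustration, below is a minimal standalone sketch of the range filter and the per-page stop/continue decision the new loop relies on. It works on bare issue numbers rather than the script's GitHubIssue objects, and the names filterByRange and pageAction are hypothetical, not taken from the script. The key assumption, which matches the new API call's sort=created&direction=desc, is that pages arrive newest-first, so the last number in a page is the oldest and later pages can only contain lower numbers.

// Sketch only: mirrors the per-page logic added in this commit, with assumed names.
type PageAction = "continue" | "stop";

// Keep issue numbers in the half-open range [min, max).
function filterByRange(numbers: number[], min: number, max: number): number[] {
  return numbers.filter((n) => n >= min && n < max);
}

// Pages are sorted newest-first, so the last number is the oldest in the page.
// Once that number drops below the minimum, no later page can be in range.
function pageAction(numbers: number[], min: number, max: number): PageAction {
  const oldest = numbers[numbers.length - 1];
  return oldest !== undefined && oldest < min ? "stop" : "continue";
}

// Example with a hypothetical page of issue numbers and the default range [1, 4050).
const examplePage = [4123, 4051, 4049, 3998];
console.log(filterByRange(examplePage, 1, 4050)); // [ 4049, 3998 ]
console.log(pageAction(examplePage, 1, 4050));    // "continue" (oldest 3998 is still >= 1)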