feat(eng): add contributor reporting and management scripts

- Add eng/README.md documenting maintainer utilities
- Add eng/contributor-report.mjs for generating contributor reports
- Add eng/add-missing-contributors.mjs for automating contributor additions
- Add eng/utils/graceful-shutdown.mjs for script lifecycle management
- Update eng/update-readme.mjs with minor fixes
- Update package.json with new contributor scripts

Generated-by: GitHub Copilot <copilot@github.com>
Signed-off-by: Ashley Childress <6563688+anchildress1@users.noreply.github.com>
author Ashley Childress
date 2025-12-19 20:43:27 -05:00
parent d929b71898
commit 1322aa2dde

10 changed files with 2634 additions and 799 deletions

eng/README.md Normal file

@@ -0,0 +1,36 @@
# Contributor Reporting (Maintainers) 🚧
This directory contains lightweight maintainer helpers for generating human-readable reports about missing contributors and for adding them automatically.
- `contributor-report.mjs` — generates a markdown report of merged PRs for missing contributors and exports the shared helpers used by `add-missing-contributors.mjs`.
- `add-missing-contributors.mjs` — on-demand maintainer script that automatically adds missing contributors to `.all-contributorsrc` (it infers contribution types from merged PR files, then runs the all-contributors CLI; the sketch below shows the type inference in isolation).
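The type inference is handled by the exported `getContributionTypes` helper. A minimal sketch of how it maps file paths to contribution types (assuming the snippet lives in `eng/`; the file names are hypothetical):

```js
import { getContributionTypes } from './contributor-report.mjs';

// Hypothetical PR file list; auto-generated files such as README.md are skipped.
const files = [
  'prompts/example.prompt.md',
  'chatmodes/example.chatmode.md',
  'README.md'
];

console.log(getContributionTypes(files)); // "agents,prompts"
```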
## Key notes for maintainers
- Reports are generated on-demand and output to `reports/contributor-report.md` for human review.
- The report output is intentionally minimal: a single list of affected PRs and one command to add missing contributor(s).
- This repository requires full git history for accurate analysis. In CI, set `fetch-depth: 0`.
- Link: [all-contributors CLI documentation](https://allcontributors.org/docs/en/cli)
## On-demand scripts (not CI)
These are maintainer utilities. They are intentionally on-demand only (but could be wired into CI later).
### `add-missing-contributors.mjs`
- Purpose: detect missing contributors, infer contribution types from their merged PR files, and run `npx all-contributors add ...` to update `.all-contributorsrc`.
- Requirements:
- GitHub CLI (`gh`) available (used to query merged PRs).
- `.all-contributorsrc` exists.
- An auth token set, to avoid unauthenticated GitHub API rate limits:
- Set `GITHUB_TOKEN` (preferred), or `GH_TOKEN` for the `gh` CLI.
- If you use `PRIVATE_TOKEN` locally, `contributor-report.mjs` will map it to `GITHUB_TOKEN`.
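For reference, the fallback mentioned in the last bullet is implemented in `contributor-report.mjs` roughly as follows (a sketch of existing behavior, not something to add yourself):

```js
// The gh CLI only reads GITHUB_TOKEN / GH_TOKEN, so PRIVATE_TOKEN is mapped across.
if (process.env.PRIVATE_TOKEN && !process.env.GITHUB_TOKEN) {
  process.env.GITHUB_TOKEN = process.env.PRIVATE_TOKEN;
}
```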
## Graceful shutdown
- Both scripts call `setupGracefulShutdown('script-name')` from `eng/utils/graceful-shutdown.mjs` early in the file to attach signal/exception handlers; a usage sketch follows.
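A minimal sketch of the call pattern (the script name is arbitrary; the returned teardown is mainly useful in tests):

```js
import { setupGracefulShutdown } from './utils/graceful-shutdown.mjs';

// Attach SIGINT/SIGTERM/SIGHUP and exception handlers as early as possible.
const teardown = setupGracefulShutdown('my-script', { exitCode: 1 });

// ... script work ...

// Remove the handlers again, e.g. in a test harness.
teardown();
```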
## Testing & maintenance
- Helper functions have small, deterministic behavior and include JSDoc comments.
- The `getMissingContributors` function in `contributor-report.mjs` is the single source of truth for detecting missing contributors from `all-contributors check` output.
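As a rough sketch, the exported helpers compose the same way `contributor-report.mjs` does in its own `main()` (run from `eng/` with a token configured as described above):

```js
import {
  getMissingContributors,
  generateContributorReport,
  generateMarkdownReport
} from './contributor-report.mjs';

// Detect missing contributors, build one report per contributor, render markdown.
const missing = getMissingContributors();
const reports = missing.map(
  (username) => generateContributorReport(username) ?? { username, totalPRs: 0, prs: [] }
);
console.log(generateMarkdownReport(reports, missing.length));
```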

eng/add-missing-contributors.mjs Normal file

@@ -0,0 +1,306 @@
/**
* One-time contributor detection and addition script.
* Discovers missing contributors, determines their contribution types from repo history,
* and updates .all-contributorsrc via the all-contributors CLI.
*
* Usage: node add-missing-contributors.mjs
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import {
getContributionTypes,
getMissingContributors,
fetchContributorMergedPrs
} from './contributor-report.mjs';
import { setupGracefulShutdown } from './utils/graceful-shutdown.mjs';
const DEFAULT_CMD_TIMEOUT = 30_000; // 30 seconds
setupGracefulShutdown('add-missing-contributors');
/**
* Get all files touched by a contributor from their merged PRs.
* @param {string} username
* @returns {string[]}
*/
const getContributorFiles = (username) => {
try {
console.log(`📁 Getting files for contributor: ${username}`);
const prs = fetchContributorMergedPrs(username, { includeAllFiles: true });
if (prs.length === 0) {
console.log(`📭 No merged PRs found for ${username}`);
return [];
}
const files = new Set();
for (const pr of prs) {
for (const file of pr.files || []) {
if (file?.path) {
files.add(file.path);
}
}
}
const fileList = Array.from(files);
console.log(`📄 Found ${fileList.length} unique files for ${username}: ${fileList.slice(0, 3).join(', ')}${fileList.length > 3 ? '...' : ''}`);
return fileList;
} catch (error) {
console.error(`❌ Error getting files for ${username}:`, error.message);
return [];
}
};
/**
* Determine contribution types from a contributor's files.
* @param {string} username
* @returns {string}
*/
const analyzeContributor = (username) => {
try {
console.log(`🔍 Analyzing contribution types for: ${username}`);
const files = getContributorFiles(username);
if (files.length === 0) {
console.log(`💡 No files found for ${username}, using 'code' fallback`);
return 'code';
}
const contributionTypes = getContributionTypes(files);
if (!contributionTypes || contributionTypes.trim() === '') {
console.log(`💡 No matching types found for ${username}, using 'code' fallback`);
return 'code';
}
console.log(`✅ Determined types for ${username}: ${contributionTypes}`);
return contributionTypes;
} catch (error) {
console.error(`❌ Error analyzing files for ${username}:`, error.message);
return 'code';
}
};
/**
* Add a username to the ignore list in .all-contributorsrc.
* @param {string} username
* @returns {boolean}
*/
const addToIgnoreList = (username) => {
try {
const configPath = path.join(process.cwd(), '.all-contributorsrc');
const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
const ignoreList = config.ignoreList || config.ignore || [];
if (!ignoreList.includes(username)) {
ignoreList.push(username);
config.ignoreList = ignoreList;
fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
console.warn(`⚠️ Added ${username} to ignore list (user not found on GitHub)`);
return true;
}
return false;
} catch (error) {
console.error(`❌ Failed to add ${username} to ignore list:`, error.message);
return false;
}
};
/**
* Run the all-contributors CLI to add a contributor to the project.
* @param {string} username
* @param {string} types
* @returns {boolean}
*/
const addContributor = (username, types) => {
try {
console.log(` Adding contributor: ${username} with types: ${types}`);
const command = `npx all-contributors add ${username} ${types}`;
execSync(command, {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe'],
timeout: DEFAULT_CMD_TIMEOUT
});
return true;
} catch (error) {
// System-level errors that should propagate up
if (error.message.includes('rate limit') || error.message.includes('403')) {
console.error(`⏱️ Rate limit encountered while adding ${username}.`);
throw error;
}
if (error.message.includes('network') || error.message.includes('timeout')) {
console.error(`🌐 Network error while adding ${username}.`);
throw error;
}
// User-specific errors that can be skipped
if (error.message.includes('404') || error.message.includes('not found')) {
addToIgnoreList(username);
console.error(`❌ User ${username} not found, added to ignore list`);
return false;
}
// Unknown error - log and skip
console.error(`❌ Failed to add contributor ${username}:`, error.message);
return false;
}
};
/**
* Process a single missing contributor: detect types and add via all-contributors CLI.
* @param {string} username
* @returns {Promise<{added:number, failed:number}>}
*/
const processContributor = async (username) => {
let added = 0;
let failed = 0;
try {
console.log(`📊 Step 2: Analyzing contribution types for ${username}...`);
const contributionTypes = analyzeContributor(username);
console.log(` Step 3: Adding ${username} with types: ${contributionTypes}...`);
const success = addContributor(username, contributionTypes);
if (success) {
added++;
console.log(`✅ Successfully processed ${username}`);
} else {
failed++;
console.log(`❌ Failed to process ${username}`);
}
} catch (error) {
failed++;
console.error(`💥 Error processing ${username}:`, error.message);
}
return { added, failed };
};
/**
* Main entry point: detect and add missing contributors.
*/
const main = async () => {
console.log('🚀 Starting add missing contributors script');
console.log('='.repeat(50));
try {
console.log('\n📋 Step 1: Detecting missing contributors...');
const missingContributors = getMissingContributors();
if (missingContributors.length === 0) {
console.log('🎉 No missing contributors found! All contributors are properly recognized.');
return { processed: 0, added: 0, failed: 0 };
}
console.log(`\n🔄 Processing ${missingContributors.length} missing contributors...`);
let processed = 0;
let added = 0;
let failed = 0;
for (const username of missingContributors) {
console.log(`\n${'─'.repeat(30)}`);
console.log(`👤 Processing contributor: ${username}`);
processed++;
try {
const { added: deltaAdded, failed: deltaFailed } = await processContributor(username);
added += deltaAdded;
failed += deltaFailed;
} catch (error) {
// Re-throw system-level errors (rate limit, network, SIGINT)
console.error(`💥 System error processing ${username}:`, error.message);
throw error;
}
}
return { processed, added, failed };
} catch (error) {
console.error('\n💥 Fatal error in main execution:', error.message);
console.error('🛑 Script execution stopped');
throw error;
}
};
/**
* Print a summary report of the run.
* @param {{processed:number, added:number, failed:number}} results
*/
const printSummaryReport = (results) => {
const { processed, added, failed } = results;
console.log('\n' + '='.repeat(50));
console.log('📊 EXECUTION SUMMARY');
console.log('='.repeat(50));
console.log(`📋 Total contributors processed: ${processed}`);
console.log(`✅ Successfully added: ${added}`);
console.log(`❌ Failed to add: ${failed}`);
if (processed === 0) {
console.log('\n🎉 SUCCESS: No missing contributors found - all contributors are properly recognized!');
} else if (failed === 0) {
console.log('\n🎉 SUCCESS: All missing contributors have been successfully added!');
console.log('💡 Next steps: Review the updated .all-contributorsrc file and commit the changes.');
} else if (added > 0) {
console.log('\n⚠ PARTIAL SUCCESS: Some contributors were added, but some failed.');
console.log(`💡 ${added} contributors were successfully added.`);
console.log(`🔄 ${failed} contributors failed - check the error messages above for details.`);
console.log('💡 You may want to run the script again to retry failed contributors.');
} else {
console.log('\n❌ FAILURE: No contributors could be added.');
console.log('💡 Check the error messages above for troubleshooting guidance.');
console.log('💡 Common issues: missing GITHUB_TOKEN, network problems, or API rate limits.');
}
console.log('\n📝 ACTIONABLE NEXT STEPS:');
if (added > 0) {
console.log('• Review the updated .all-contributorsrc file');
console.log('• Commit and push the changes to update the README');
console.log('• Consider running "npm run contributors:generate" to update the README');
}
if (failed > 0) {
console.log('• Check error messages above for specific failure reasons');
console.log('• Verify GITHUB_TOKEN is set and has appropriate permissions');
console.log('• Consider running the script again after resolving issues');
}
if (processed === 0) {
console.log('• No action needed - all contributors are already recognized!');
}
console.log('\n' + '='.repeat(50));
};
if (process.argv[1] === (new URL(import.meta.url)).pathname) {
try {
const results = await main();
printSummaryReport(results);
if (results.failed > 0 && results.added === 0) {
process.exit(1);
} else if (results.failed > 0) {
process.exit(2);
} else {
process.exit(0);
}
} catch (error) {
console.error('\n💥 Script execution failed:', error.message);
console.log('\n📝 TROUBLESHOOTING TIPS:');
console.log('• Ensure you are in a git repository');
console.log('• Verify all-contributors-cli is installed');
console.log('• Check that .all-contributorsrc file exists');
console.log('• Ensure GITHUB_TOKEN environment variable is set');
process.exit(1);
}
}

eng/contributor-report.mjs Normal file

@@ -0,0 +1,597 @@
/**
* Generate human-readable reports about missing contributors.
* This module queries merged PRs via 'gh' and produces a markdown report.
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import { setupGracefulShutdown } from './utils/graceful-shutdown.mjs';
const DEFAULT_CMD_TIMEOUT = 30_000; // 30s
setupGracefulShutdown('contributor-report');
/**
* Patterns that represent generated files; contributors should not be credited
* for these files because they are not substantive authored content.
*/
export const AUTO_GENERATED_PATTERNS = [
'README.md',
'README.*.md',
'collections/*.md',
'collections/*.collection.md',
'docs/README.*.md',
'docs/*.generated.md'
];
/**
* File globs used to infer contribution types from file paths.
*/
export const TYPE_PATTERNS = {
instructions: [
'instructions/*.instructions.md'
],
prompts: [
'prompts/*.prompt.md'
],
agents: [
'chatmodes/*.chatmode.md',
'agents/*.agent.md'
],
collections: [
'collections/*.collection.yml'
],
doc: [
'docs/**/*.md',
'.github/**/*.md',
'CONTRIBUTING.md',
'SECURITY.md',
'SUPPORT.md',
'LICENSE.md',
'CHANGELOG.md',
'*.md'
],
infra: [
'.github/workflows/**/*.yml',
'.github/workflows/**/*.yaml',
'**/*.yml',
'**/*.yaml'
],
maintenance: [
'package*.json',
'*.config.js',
'tsconfig*.json'
],
code: [
// globToRegExp does not expand braces, so extensions are listed individually.
'**/*.js',
'**/*.ts',
'**/*.mjs',
'**/*.cjs',
'**/*.py'
]
};
const globCache = new Map();
/**
* Convert a simple glob (with *, **) to a RegExp.
* This is intentionally small and deterministic for our repo patterns.
* @param {string} pattern
* @returns {RegExp}
*/
export const globToRegExp = (pattern) => {
const DOUBLE_WILDCARD_PLACEHOLDER = '§§DOUBLE§§';
const replacements = [
{ pattern: /\\/g, replacement: '/' },
{ pattern: /\./g, replacement: String.raw`\.` },
{ pattern: /\*\*/g, replacement: DOUBLE_WILDCARD_PLACEHOLDER },
{ pattern: /\*/g, replacement: '[^/]*' },
{ pattern: new RegExp(DOUBLE_WILDCARD_PLACEHOLDER, 'g'), replacement: '.*' },
{ pattern: /\?/g, replacement: '.' },
{ pattern: /\//g, replacement: String.raw`\/` }
];
const normalized = replacements.reduce((acc, { pattern, replacement }) => acc.replace(pattern, replacement), String(pattern));
return new RegExp(`^${normalized}$`);
};
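// Example (illustrative): globToRegExp('prompts/*.prompt.md') produces
// /^prompts\/[^\/]*\.prompt\.md$/, so 'prompts/foo.prompt.md' matches the
// pattern and 'docs/foo.prompt.md' does not.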
/**
* Test whether a file path matches a glob pattern.
* @param {string} filePath
* @param {string} pattern
* @returns {boolean}
*/
export const matchGlob = (filePath, pattern) => {
if (!globCache.has(pattern)) {
try {
globCache.set(pattern, globToRegExp(pattern));
} catch {
globCache.set(pattern, null);
}
}
const regexp = globCache.get(pattern);
if (!regexp) {
return false;
}
const normalized = filePath.replace(/\\/g, '/');
return regexp.test(normalized);
};
/**
* Return true if the given path matches one of the known auto-generated patterns.
* @param {string} filePath
* @returns {boolean}
*/
export const isAutoGeneratedFile = (filePath) => {
return AUTO_GENERATED_PATTERNS.some((pattern) => matchGlob(filePath, pattern));
};
/**
* Infer a contribution type string (e.g. 'prompts', 'agents', 'doc') for a file path.
* Returns null if no specific type matched.
* @param {string} filePath
* @returns {string|null}
*/
export const getFileContributionType = (filePath) => {
const normalized = filePath.replace(/\\/g, '/');
for (const [type, patterns] of Object.entries(TYPE_PATTERNS)) {
if (patterns.some((pattern) => matchGlob(normalized, pattern))) {
return type;
}
}
return null;
};
/**
* Derive a comma-separated list of contribution type identifiers from a list of files.
* Auto-generated files are ignored. Returns '' when no files to process.
* @param {string[]} files
* @returns {string}
*/
export const getContributionTypes = (files) => {
const types = new Set();
let processed = 0;
for (const file of files) {
if (isAutoGeneratedFile(file)) {
continue;
}
processed += 1;
const type = getFileContributionType(file);
if (type) {
types.add(type);
}
}
if (processed === 0) {
return '';
}
if (types.size === 0) {
types.add('code');
}
return Array.from(types).sort((a, b) => a.localeCompare(b)).join(',');
};
/**
* Check .all-contributors output to discover missing contributors.
* This is the canonical implementation used by contributor tooling.
* @returns {string[]}
*/
export const getMissingContributors = () => {
try {
console.log('🔍 Checking for missing contributors...');
const configPath = path.join(process.cwd(), '.all-contributorsrc');
const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
const ignoreEntries = config.ignoreList || config.ignore || [];
const ignoreSet = new Set(ignoreEntries.map((entry) => entry.toLowerCase()));
if (ignoreSet.size > 0) {
console.log(`📋 Loaded ignore list: ${Array.from(ignoreSet).join(', ')}`);
}
const output = execSync('npx all-contributors check', {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe'],
timeout: DEFAULT_CMD_TIMEOUT
});
const lines = output.split('\n');
const headerLineIndex = lines.findIndex(line =>
line.includes('Missing contributors in .all-contributorsrc:')
);
if (headerLineIndex === -1) {
console.log('✅ No missing contributors found');
return [];
}
let contributorsLine = '';
for (let i = headerLineIndex + 1; i < lines.length; i++) {
const line = lines[i].trim();
if (line.includes('Unknown contributors') || line.includes('✨')) {
break;
}
if (line && !line.startsWith('⠙') && !line.startsWith('✨')) {
contributorsLine = line;
break;
}
}
if (!contributorsLine) {
console.log('✅ No missing contributors found');
return [];
}
const allUsernames = contributorsLine
.split(',')
.map(username => username.trim())
.filter(username => username.length > 0);
const filteredUsernames = allUsernames.filter(username => {
const lowerUsername = username.toLowerCase();
if (ignoreSet.has(lowerUsername)) {
console.log(`⏭️ FILTERED: ${username} is in ignore list`);
return false;
}
return true;
});
console.log(`📋 Found ${filteredUsernames.length} missing contributors after filtering: ${filteredUsernames.join(', ')}`);
return filteredUsernames;
} catch (error) {
console.error('❌ Error checking for missing contributors:', error.message);
if (error.message.includes('command not found') || error.message.includes('not recognized')) {
console.error('💡 Make sure all-contributors-cli is installed: npm install all-contributors-cli');
}
return [];
}
};
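// Illustrative `all-contributors check` output that getMissingContributors expects:
//   Missing contributors in .all-contributorsrc:
//   octocat, some-user
// The usernames line is split on commas and anything on the ignore list is dropped.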
// --- REPORT GENERATION LOGIC ---
/**
* Get the current GitHub repository in owner/repo format.
* Tries upstream first, then origin.
* @returns {string}
*/
const getGitHubRepo = () => {
try {
const upstreamUrl = execSync('git config --get remote.upstream.url', {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe']
}).trim();
if (upstreamUrl) {
// Match both SSH (github.com:owner/repo) and HTTPS (github.com/owner/repo) remotes.
const match = upstreamUrl.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?$/);
if (match) return `${match[1]}/${match[2]}`;
}
} catch (e) {
console.debug('upstream not found, trying origin');
}
try {
const originUrl = execSync('git config --get remote.origin.url', {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe']
}).trim();
const match = originUrl.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?$/);
if (match) return `${match[1]}/${match[2]}`;
} catch (e) {
console.debug('origin not found, using default');
}
return 'github/awesome-copilot';
};
const CONTRIBUTION_TYPE_MAP = {
'instructions': { symbol: '🧭', description: 'The big AI prompt recipes (Copilot instruction sets)' },
'prompts': { symbol: '⌨️', description: 'One-shot or reusable user-level prompts' },
'agents': { symbol: '🎭', description: 'Defined Copilot personalities / roles' },
'collections': { symbol: '🎁', description: 'Bundled thematic sets (e.g., "Copilot for Docs")' }
};
/**
* Fetch merged PRs for a GitHub username using the GH CLI and filter files.
* @param {string} username
* @param {{includeAllFiles?:boolean}} [opts]
* @returns {Array<object>} Array of PR objects
*/
export const fetchContributorMergedPrs = (username, { includeAllFiles = false } = {}) => {
try {
const repo = getGitHubRepo();
const result = execSync(
`gh pr list --repo ${repo} --state merged --author ${username} --json number,title,mergedAt,files,url --limit 100`,
{ encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'], timeout: DEFAULT_CMD_TIMEOUT }
);
const prs = JSON.parse(result);
if (includeAllFiles) {
return prs;
}
return prs.filter(pr => {
const hasNonConfigFiles = pr.files.some(file =>
!isAutoGeneratedFile(file.path)
);
return hasNonConfigFiles;
});
} catch (error) {
console.error(`Failed to fetch PRs for ${username}:`, error.message);
return [];
}
};
/**
* Convert a PR object into a normalized report entry with types and file details.
* @param {{login:string}} contributor
* @param {object} pr
* @param {{includeAllFiles?:boolean}} [opts]
* @returns {object|null}
*/
const generatePRReport = (contributor, pr, { includeAllFiles = false } = {}) => {
const types = new Set();
const fileDetails = [];
for (const file of pr.files) {
if (!file?.path) {
continue;
}
// Include generated files only if includeAllFiles is true
if (!includeAllFiles && isAutoGeneratedFile(file.path)) {
continue;
}
const type = getFileContributionType(file.path) || 'ideas';
if (type) {
types.add(type);
}
fileDetails.push({
path: file.path,
type: type || 'unknown',
additions: file.additions,
deletions: file.deletions
});
}
// If no non-filtered files contributed to types, and we're not asked for all files, skip this PR
if (types.size === 0 && !includeAllFiles) {
return null;
}
// Fallback to 'code' if no types detected
if (types.size === 0) {
types.add('code');
}
const typeList = Array.from(types);
return {
prNumber: pr.number,
prTitle: pr.title,
prUrl: pr.url,
mergedAt: pr.mergedAt,
contributionTypes: typeList,
files: fileDetails,
commentSnippet: `@all-contributors please add @${contributor.login} for ${typeList.join(', ')}`
};
};
/**
* Build a contributor report by inspecting merged PRs and mapping files to types.
* Returns null when no relevant PRs were found (unless includeAllFiles is true).
* @param {string} username
* @param {{includeAllFiles?:boolean}} [opts]
* @returns {object|null}
*/
export const generateContributorReport = (username, { includeAllFiles = false } = {}) => {
console.log(`Inspecting ${username}...`);
const prs = fetchContributorMergedPrs(username, { includeAllFiles });
const prReports = prs
.map(pr => generatePRReport({ login: username }, pr, { includeAllFiles }))
.filter(report => report !== null);
// If no relevant PR reports and not explicitly including all files, skip the contributor entirely
if (prReports.length === 0 && !includeAllFiles) {
return null;
}
return {
username,
totalPRs: prs.length,
prs: prReports
};
};
/**
* Render a set of contributor reports as markdown for human review.
* @param {Array<object>} reports
* @param {number} missingCount - number of missing contributors detected
* @returns {string}
*/
export const generateMarkdownReport = (reports, missingCount = 0) => {
// The report is intentionally minimal: a single list of affected PRs and
// a single copy/paste command maintainers can run locally.
// No timestamps, per-file breakdowns, or duplicated metadata.
if (!missingCount) {
return 'No missing contributors detected.\n';
}
// 1) Single list of affected PRs (deduped).
const prEntries = new Map(); // key=prNumber or url, value={number,url,mergedAt}
for (const report of reports) {
for (const pr of report.prs) {
const key = pr.prUrl || String(pr.prNumber);
if (!prEntries.has(key)) {
prEntries.set(key, {
number: pr.prNumber,
url: pr.prUrl,
mergedAt: pr.mergedAt
});
}
}
}
const prList = Array.from(prEntries.values()).sort((a, b) => {
// Prefer chronological sort for stable “what happened” review.
const aTime = a.mergedAt ? Date.parse(a.mergedAt) : 0;
const bTime = b.mergedAt ? Date.parse(b.mergedAt) : 0;
if (aTime !== bTime) return aTime - bTime;
return (a.number ?? 0) - (b.number ?? 0);
});
// 2) One command (one line). If multiple users are missing, chain them.
const commandParts = [];
for (const report of reports) {
const typeSet = new Set();
for (const pr of report.prs) {
for (const type of pr.contributionTypes || []) {
typeSet.add(type);
}
}
const types = Array.from(typeSet).filter(Boolean).sort((a, b) => a.localeCompare(b));
const typesArg = types.length > 0 ? types.join(',') : 'code';
commandParts.push(`npx all-contributors add ${report.username} ${typesArg}`);
}
let markdown = '';
markdown += prList.map((pr) => `- #${pr.number} ${pr.url}`).join('\n');
markdown += '\n\n';
markdown += commandParts.join(' && ');
markdown += '\n';
return markdown;
};
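// Illustrative report output for a single missing contributor (values are made up):
//   - #123 https://github.com/github/awesome-copilot/pull/123
//
//   npx all-contributors add octocat doc,prompts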
/**
* Check whether a PR already contains an all-contributors bot comment.
* @param {number} prNumber
* @returns {boolean}
*/
export const hasExistingAllContributorsComment = (prNumber) => {
try {
const repo = getGitHubRepo();
const json = execSync(`gh pr view ${prNumber} --repo ${repo} --json comments`, {
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe'],
timeout: DEFAULT_CMD_TIMEOUT
});
const data = JSON.parse(json);
const comments = data?.comments?.nodes || data?.comments || [];
return comments.some((comment) => comment?.body?.includes(`@all-contributors`));
} catch (error) {
console.warn(`⚠️ Unable to inspect comments for PR #${prNumber}: ${error.message}`);
return false;
}
};
/**
* Post a comment to a PR using the GH CLI.
* @param {number} prNumber
* @param {string} body
* @returns {boolean}
*/
export const postCommentOnPr = (prNumber, body) => {
try {
const repo = getGitHubRepo();
// Escape every character that is special inside double quotes in sh: \ " $ `
const safeBody = body.replace(/([\\"$`])/g, '\\$1');
execSync(`gh pr comment ${prNumber} --repo ${repo} --body "${safeBody}"`, {
encoding: 'utf8',
stdio: ['pipe', 'inherit', 'inherit'],
timeout: DEFAULT_CMD_TIMEOUT
});
console.log(`💬 Posted recommendation comment on PR #${prNumber}`);
return true;
} catch (error) {
console.warn(`⚠️ Failed to post comment on PR #${prNumber}: ${error.message}`);
return false;
}
};
/**
* Post suggested all-contributors comments to PRs for a collection of reports.
* @param {Array<object>} reports
*/
export const autoAddCommentsToReports = (reports) => {
for (const report of reports) {
for (const pr of report.prs) {
if (hasExistingAllContributorsComment(pr.prNumber)) {
console.log(`💬 Skipping PR #${pr.prNumber} for @${report.username} — comment already present`);
continue;
}
const types = pr.contributionTypes.map(t => '`' + t + '`').join(', ');
const commentLines = [
`Thanks for the contribution @${report.username}!`,
'',
`We detected contribution categories for this PR: ${types || '`code`'}.`,
'',
`@all-contributors please add @${report.username} for ${pr.contributionTypes.join(', ')}`
];
const body = commentLines.join('\n');
postCommentOnPr(pr.prNumber, body);
}
}
};
const main = () => {
try {
const ghToken = process.env.GITHUB_TOKEN || process.env.PRIVATE_TOKEN;
if (!ghToken) {
console.error('❌ GITHUB_TOKEN or PRIVATE_TOKEN environment variable is required for GitHub CLI operations');
process.exit(1);
}
// gh CLI only reads GITHUB_TOKEN or GH_TOKEN, so ensure it's set
if (process.env.PRIVATE_TOKEN && !process.env.GITHUB_TOKEN) {
process.env.GITHUB_TOKEN = process.env.PRIVATE_TOKEN;
}
const args = new Set(process.argv.slice(2));
const autoAdd = args.has('--auto-add-pr-comments');
const includeAllFiles = args.has('--include-all-pr-files');
const contributors = getMissingContributors();
console.log(`Inspecting ${contributors.length} missing contributors...\n`);
const reports = [];
for (const contributor of contributors) {
const report = generateContributorReport(contributor, { includeAllFiles });
reports.push(report || { username: contributor, totalPRs: 0, prs: [] });
}
const markdown = generateMarkdownReport(reports, contributors.length);
const outputPath = path.join(process.cwd(), 'reports', 'contributor-report.md');
// Ensure the reports/ directory exists before writing.
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
fs.writeFileSync(outputPath, markdown);
console.log(`Report saved to: ${outputPath}`);
if (autoAdd) {
autoAddCommentsToReports(reports);
}
} catch (error) {
console.error('Error generating report:', error);
process.exit(1);
}
};
if (process.argv[1] === (new URL(import.meta.url)).pathname) {
main();
}

eng/utils/graceful-shutdown.mjs Normal file

@@ -0,0 +1,60 @@
/**
* Lightweight graceful shutdown helper for one-off scripts.
*
* Call setupGracefulShutdown('script-name') early in your script to attach
* signal and exception handlers that exit the process cleanly.
*
* @param {string} name - Human readable name for log messages
* @param {{exitCode?:number}} [opts]
* @returns {() => void} teardown function to remove handlers (useful in tests)
*/
export const setupGracefulShutdown = (name, { exitCode = 1 } = {}) => {
let _shuttingDown = false;
const cleanup = (signal) => {
if (_shuttingDown) return;
_shuttingDown = true;
console.log(`\n🛑 ${name}: received ${signal}, shutting down gracefully...`);
// Best-effort cleanup: keep this short and synchronous
try {
// Place for lightweight cleanup tasks if needed in future
} catch (e) {
console.error(`${name}: error during shutdown cleanup:`, e);
}
// Exit with a non-zero code to indicate abnormal termination
try {
process.exit(exitCode);
} catch (e) {
// process.exit may not be desirable in some test harnesses; swallow errors
console.warn(`${name}: process.exit failed:`, e?.message);
}
};
const onSigInt = () => cleanup('SIGINT');
const onSigTerm = () => cleanup('SIGTERM');
const onSigHup = () => cleanup('SIGHUP');
const onUncaught = (err) => {
console.error(`${name}: Uncaught exception:`, err);
cleanup('uncaughtException');
};
const onUnhandledRejection = (reason) => {
console.error(`${name}: Unhandled promise rejection:`, reason);
cleanup('unhandledRejection');
};
process.on('SIGINT', onSigInt);
process.on('SIGTERM', onSigTerm);
process.on('SIGHUP', onSigHup);
process.on('uncaughtException', onUncaught);
process.on('unhandledRejection', onUnhandledRejection);
// Return a teardown function useful for tests or if a caller wants to remove handlers
return () => {
process.removeListener('SIGINT', onSigInt);
process.removeListener('SIGTERM', onSigTerm);
process.removeListener('SIGHUP', onSigHup);
process.removeListener('uncaughtException', onUncaught);
process.removeListener('unhandledRejection', onUnhandledRejection);
};
};