Compare commits
1 Commits
metrics-co
...
bus-factor
| Author | SHA1 | Date | |
|---|---|---|---|
| 066d50fa89 |
213
scripts/bus-factor.mjs
Normal file
213
scripts/bus-factor.mjs
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* Bus-Factor Analyzer
|
||||||
|
* Analyzes code ownership concentration by examining git commit history.
|
||||||
|
* Usage: node scripts/bus-factor.js [--json] [--min-commits N] [--threshold N] [--top N]
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync } from 'child_process';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
// --- Pure analysis functions (exported for testing) ---
|
||||||
|
|
||||||
|
/**
 * Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
 *
 * Recognizes three line shapes:
 *   - "commit <hash>"              — starts a new commit (author is reset)
 *   - "Author: Name <email>"       — sets the current author
 *   - "<added>\t<deleted>\t<path>" — one changed file; counts are numeric, or
 *     "-" for binary files (previously skipped, now counted toward ownership)
 *
 * @param {string} rawLog - Output from git log --numstat
 * @returns {Object} { [filePath]: { [author]: number } }
 */
export function parseGitLog(rawLog) {
  const ownership = {};
  let currentAuthor = null;

  for (const line of rawLog.split('\n')) {
    // Commit header line: "commit <hash>" — clear author until the next Author line.
    if (line.startsWith('commit ')) {
      currentAuthor = null;
      continue;
    }

    // Author line: "Author: Name <email>"
    const authorMatch = line.match(/^Author:\s+(.+?)\s+<[^>]+>/);
    if (authorMatch) {
      currentAuthor = authorMatch[1].trim();
      continue;
    }

    // Numstat line: "<added>\t<deleted>\t<filename>".
    // git emits "-" instead of a count for binary files; accept those too so
    // commits that only touch binary files still contribute to ownership.
    if (currentAuthor && /^(?:\d+|-)\t(?:\d+|-)\t/.test(line)) {
      const parts = line.split('\t');
      if (parts.length < 3) continue;

      // Handle rename: "old/path => new/path" or "{old => new}/suffix" — keep the new path.
      let filePath = parts[2];
      if (filePath.includes('{') && filePath.includes('=>')) {
        // NOTE(review): the \s+ strip also removes genuine spaces inside
        // filenames — rare in practice, but worth confirming if it matters.
        filePath = filePath.replace(/\{([^}]*?)\s*=>\s*([^}]*?)\}/g, '$2').replace(/\s+/g, '');
      } else if (filePath.includes(' => ')) {
        filePath = filePath.split(' => ')[1].trim();
      }

      filePath = filePath.trim();
      if (!filePath) continue;

      if (!ownership[filePath]) ownership[filePath] = {};
      ownership[filePath][currentAuthor] = (ownership[filePath][currentAuthor] || 0) + 1;
    }
  }

  return ownership;
}
|
||||||
|
|
||||||
|
/**
 * Compute ownership metrics for a single file.
 * @param {Object} authorCounts - { [author]: commitCount }
 * @param {number} ownershipThreshold - min fraction to count toward bus-factor (default 0.1)
 * @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
 */
export function computeOwnership(authorCounts, ownershipThreshold = 0.1) {
  // Rank contributors by commit count, most prolific first.
  const ranked = Object.entries(authorCounts).sort(([, x], [, y]) => y - x);

  let totalCommits = 0;
  for (const [, count] of ranked) {
    totalCommits += count;
  }

  const authors = ranked.map(([name, commits]) => {
    const pct = totalCommits > 0 ? commits / totalCommits : 0;
    return { name, commits, pct };
  });

  // Bus-factor: number of contributors holding at least the threshold share.
  let busFactor = 0;
  for (const author of authors) {
    if (author.pct >= ownershipThreshold) {
      busFactor += 1;
    }
  }

  return {
    totalCommits,
    authors,
    busFactor,
    primaryOwner: authors.length > 0 ? authors[0] : null,
  };
}
|
||||||
|
|
||||||
|
/**
 * Score all files and return sorted results.
 * @param {Object} ownership - Output from parseGitLog
 * @param {Object} options
 * @returns {Array} Sorted file entries with ownership metrics
 */
export function scoreFiles(ownership, { minCommits = 2, ownershipThreshold = 0.1 } = {}) {
  // Score every file, dropping those with too few commits to be meaningful.
  const scored = Object.entries(ownership)
    .map(([file, authorCounts]) => ({
      file,
      ...computeOwnership(authorCounts, ownershipThreshold),
    }))
    .filter((entry) => entry.totalCommits >= minCommits);

  // Sort: lowest bus-factor first, then most commits (highest risk first)
  scored.sort((x, y) => {
    if (x.busFactor !== y.busFactor) {
      return x.busFactor - y.busFactor;
    }
    return y.totalCommits - x.totalCommits;
  });

  return scored;
}
|
||||||
|
|
||||||
|
/**
 * Compute overall repo stats (weighted average bus-factor, high-risk count).
 * @param {Array} scoredFiles - Output from scoreFiles
 * @returns {Object} { avgBusFactor, highRiskCount, totalFiles }
 */
export function repoStats(scoredFiles) {
  if (scoredFiles.length === 0) {
    return { avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 };
  }

  // Single pass: accumulate commit totals, commit-weighted bus-factor,
  // and the count of files owned by effectively one person.
  let totalCommits = 0;
  let weightedBusFactor = 0;
  let highRiskCount = 0;
  for (const entry of scoredFiles) {
    totalCommits += entry.totalCommits;
    weightedBusFactor += entry.busFactor * entry.totalCommits;
    if (entry.busFactor === 1) {
      highRiskCount += 1;
    }
  }

  return {
    avgBusFactor: totalCommits > 0 ? weightedBusFactor / totalCommits : 0,
    highRiskCount,
    totalFiles: scoredFiles.length,
  };
}
|
||||||
|
|
||||||
|
// --- CLI ---
|
||||||
|
|
||||||
|
/**
 * Run `git log --numstat` restricted to the main source directories and
 * return the raw text output for parseGitLog.
 * @param {string} repoRoot - Absolute path to the repository root
 * @returns {string} Raw git log output
 * @throws If git is unavailable or the command fails (execSync throws).
 */
function collectGitLog(repoRoot) {
  const dirs = ['server/src', 'client/src', 'db/migrations'];
  // NOTE(review): repoRoot is interpolated into a shell command string. It is
  // derived from the script's own path (not untrusted input), but a path
  // containing `"` would still break the command — consider execFileSync.
  const cmd = `git -C "${repoRoot}" log --numstat -- ${dirs.join(' ')}`;
  // 50 MB buffer: full-history numstat output can be large on active repos.
  return execSync(cmd, { maxBuffer: 50 * 1024 * 1024 }).toString();
}
|
||||||
|
|
||||||
|
/**
 * Render the human-readable report: overall stats, the top-N high-risk files
 * (bus-factor = 1), and a per-author contribution summary.
 * @param {Array} scoredFiles - Output of scoreFiles (sorted, riskiest first)
 * @param {Object} stats - Output of repoStats
 * @param {number} topN - Maximum number of high-risk files to list
 * @returns {string} Multi-line report text
 */
function formatReport(scoredFiles, stats, topN = 10) {
  const lines = [];
  lines.push('');
  lines.push('=== Bus-Factor Analysis ===');
  lines.push('');
  lines.push(`Files analyzed : ${stats.totalFiles}`);
  lines.push(`High-risk files: ${stats.highRiskCount} (bus-factor = 1)`);
  lines.push(`Avg bus-factor : ${stats.avgBusFactor.toFixed(2)} (weighted by commits)`);
  lines.push('');

  // Section 1: files effectively owned by a single person.
  const highRisk = scoredFiles.filter(f => f.busFactor === 1);
  if (highRisk.length === 0) {
    lines.push('No high-risk files found.');
  } else {
    lines.push(`--- Top ${Math.min(topN, highRisk.length)} High-Risk Files (bus-factor = 1) ---`);
    lines.push('');
    for (const f of highRisk.slice(0, topN)) {
      // primaryOwner is non-null here: busFactor === 1 implies >= 1 author.
      const owner = f.primaryOwner;
      lines.push(`  ${f.file}`);
      lines.push(`    commits: ${f.totalCommits} owner: ${owner.name} (${(owner.pct * 100).toFixed(0)}%)`);
      if (f.authors.length > 1) {
        // Show up to two secondary contributors for context.
        const others = f.authors.slice(1, 3).map(a => `${a.name} ${(a.pct * 100).toFixed(0)}%`).join(', ');
        lines.push(`    others: ${others}`);
      }
      lines.push('');
    }
  }

  // Section 2: commits per author, summed across all analyzed files.
  lines.push('--- Author Contribution Summary ---');
  lines.push('');
  const authorTotals = {};
  for (const f of scoredFiles) {
    for (const a of f.authors) {
      authorTotals[a.name] = (authorTotals[a.name] || 0) + a.commits;
    }
  }
  const totalAll = Object.values(authorTotals).reduce((s, n) => s + n, 0);
  const sorted = Object.entries(authorTotals).sort((a, b) => b[1] - a[1]);
  for (const [name, commits] of sorted) {
    const pct = totalAll > 0 ? (commits / totalAll * 100).toFixed(1) : '0.0';
    lines.push(`  ${name.padEnd(30)} ${String(commits).padStart(5)} commits (${pct}%)`);
  }
  lines.push('');

  return lines.join('\n');
}
|
||||||
|
|
||||||
|
// Detect if running as main script (ESM equivalent of require.main === module)
const isMain = process.argv[1] === fileURLToPath(import.meta.url);

if (isMain) {
  const args = process.argv.slice(2);
  const jsonMode = args.includes('--json');

  /**
   * Read the value following `flag` and parse it with `parse`, falling back
   * to `fallback` when the flag is absent or its value is missing/not numeric.
   * (Previously a garbled or missing value produced NaN, which silently
   * disabled min-commit filtering and threshold comparisons.)
   */
  const numericArg = (flag, parse, fallback) => {
    const idx = args.indexOf(flag);
    if (idx === -1) return fallback;
    const value = parse(args[idx + 1]);
    return Number.isFinite(value) ? value : fallback;
  };

  const minCommits = numericArg('--min-commits', (v) => parseInt(v, 10), 2);
  const threshold = numericArg('--threshold', parseFloat, 0.1);
  const topN = numericArg('--top', (v) => parseInt(v, 10), 10);

  // scripts/bus-factor.mjs -> scripts/ -> repo root
  const repoRoot = path.resolve(fileURLToPath(import.meta.url), '..', '..');

  let rawLog;
  try {
    rawLog = collectGitLog(repoRoot);
  } catch (err) {
    process.stderr.write(`Error running git log: ${err.message}\n`);
    process.exit(1);
  }

  const ownership = parseGitLog(rawLog);
  const scoredFiles = scoreFiles(ownership, { minCommits, ownershipThreshold: threshold });
  const stats = repoStats(scoredFiles);

  if (jsonMode) {
    process.stdout.write(JSON.stringify({ stats, files: scoredFiles }, null, 2) + '\n');
  } else {
    process.stdout.write(formatReport(scoredFiles, stats, topN));
  }
}
|
||||||
202
scripts/bus-factor.test.mjs
Normal file
202
scripts/bus-factor.test.mjs
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
// Unit tests for the pure analysis functions exported by bus-factor.mjs.
// Run with vitest (see scripts/package.json "test" script).
import { describe, it, expect } from 'vitest';
import { parseGitLog, computeOwnership, scoreFiles, repoStats } from './bus-factor.mjs';

// --- parseGitLog ---

describe('parseGitLog', () => {
  it('parses a single commit with one file', () => {
    // \t inside the template literal becomes a real tab, matching numstat output.
    const raw = `commit abc123
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Initial commit

5\t2\tserver/src/index.js
`;
    const result = parseGitLog(raw);
    expect(result['server/src/index.js']).toEqual({ Alice: 1 });
  });

  it('accumulates multiple commits by the same author', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

First

3\t0\tserver/src/app.js

commit bbb
Author: Alice <alice@example.com>
Date: Tue Jan 2 00:00:00 2024

Second

1\t1\tserver/src/app.js
`;
    const result = parseGitLog(raw);
    expect(result['server/src/app.js']['Alice']).toBe(2);
  });

  it('tracks multiple authors for the same file', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Alice commit

2\t0\tclient/src/App.jsx

commit bbb
Author: Bob <bob@example.com>
Date: Tue Jan 2 00:00:00 2024

Bob commit

1\t0\tclient/src/App.jsx
`;
    const result = parseGitLog(raw);
    expect(result['client/src/App.jsx']['Alice']).toBe(1);
    expect(result['client/src/App.jsx']['Bob']).toBe(1);
  });

  it('handles multiple files per commit', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Multi-file commit

2\t0\tserver/src/a.js
3\t1\tserver/src/b.js
`;
    const result = parseGitLog(raw);
    expect(result['server/src/a.js']['Alice']).toBe(1);
    expect(result['server/src/b.js']['Alice']).toBe(1);
  });

  it('handles rename syntax (old => new)', () => {
    // Only the post-rename path should be credited.
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Rename

2\t0\told/path.js => new/path.js
`;
    const result = parseGitLog(raw);
    expect(result['new/path.js']).toBeDefined();
    expect(result['old/path.js']).toBeUndefined();
  });

  it('returns empty object for empty log', () => {
    expect(parseGitLog('')).toEqual({});
  });
});

// --- computeOwnership ---

describe('computeOwnership', () => {
  it('computes bus-factor of 1 for a solo author', () => {
    const result = computeOwnership({ Alice: 10 });
    expect(result.busFactor).toBe(1);
    expect(result.totalCommits).toBe(10);
    expect(result.primaryOwner.name).toBe('Alice');
    expect(result.primaryOwner.pct).toBe(1);
  });

  it('computes bus-factor of 2 when two authors each own >= 10%', () => {
    const result = computeOwnership({ Alice: 8, Bob: 2 });
    expect(result.busFactor).toBe(2);
  });

  it('does not count authors below the threshold', () => {
    // Bob has 5% — below default 10% threshold
    const result = computeOwnership({ Alice: 19, Bob: 1 });
    expect(result.busFactor).toBe(1);
  });

  it('respects a custom ownership threshold', () => {
    // With 20% threshold, Bob (10%) doesn't count
    const result = computeOwnership({ Alice: 9, Bob: 1 }, 0.2);
    expect(result.busFactor).toBe(1);
  });

  it('sorts authors by commit count descending', () => {
    const result = computeOwnership({ Alice: 3, Bob: 7, Carol: 5 });
    expect(result.authors[0].name).toBe('Bob');
    expect(result.authors[1].name).toBe('Carol');
    expect(result.authors[2].name).toBe('Alice');
  });

  it('handles empty author counts gracefully', () => {
    const result = computeOwnership({});
    expect(result.totalCommits).toBe(0);
    expect(result.busFactor).toBe(0);
    expect(result.primaryOwner).toBeNull();
  });
});

// --- scoreFiles ---

describe('scoreFiles', () => {
  // Shared fixture covering the three interesting cases.
  const ownership = {
    'server/src/risk.js': { Alice: 9, Bob: 1 }, // bus-factor 1 (Bob < 10%)
    'server/src/shared.js': { Alice: 5, Bob: 5 }, // bus-factor 2
    'server/src/tiny.js': { Alice: 1 }, // below minCommits=2, filtered
  };

  it('filters files below minCommits', () => {
    const results = scoreFiles(ownership, { minCommits: 2 });
    expect(results.find(f => f.file === 'server/src/tiny.js')).toBeUndefined();
  });

  it('includes files at or above minCommits', () => {
    const results = scoreFiles(ownership, { minCommits: 2 });
    const files = results.map(f => f.file);
    expect(files).toContain('server/src/risk.js');
    expect(files).toContain('server/src/shared.js');
  });

  it('sorts lowest bus-factor first', () => {
    const results = scoreFiles(ownership, { minCommits: 2 });
    expect(results[0].file).toBe('server/src/risk.js');
    expect(results[1].file).toBe('server/src/shared.js');
  });

  it('returns empty array for empty ownership', () => {
    expect(scoreFiles({}, {})).toEqual([]);
  });
});

// --- repoStats ---

describe('repoStats', () => {
  it('returns zeros for empty input', () => {
    const stats = repoStats([]);
    expect(stats.avgBusFactor).toBe(0);
    expect(stats.highRiskCount).toBe(0);
    expect(stats.totalFiles).toBe(0);
  });

  it('counts high-risk files (busFactor === 1)', () => {
    const files = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 2, totalCommits: 5, authors: [] },
      { busFactor: 1, totalCommits: 3, authors: [] },
    ];
    const stats = repoStats(files);
    expect(stats.highRiskCount).toBe(2);
    expect(stats.totalFiles).toBe(3);
  });

  it('computes weighted average bus-factor', () => {
    const files = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 3, totalCommits: 10, authors: [] },
    ];
    const stats = repoStats(files);
    // (1*10 + 3*10) / 20 = 2
    expect(stats.avgBusFactor).toBe(2);
  });
});
|
||||||
@@ -1,251 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
/**
|
|
||||||
* metrics-coverage.js — Static analysis script for metrics/logging instrumentation coverage.
|
|
||||||
*
|
|
||||||
* Scans all Express route files in server/src/routes/*.js and app.js to measure
|
|
||||||
* how many route handlers contain logging calls (console.error/console.warn/console.log).
|
|
||||||
*
|
|
||||||
* Usage:
|
|
||||||
* node scripts/metrics-coverage.js # JSON output (default)
|
|
||||||
* node scripts/metrics-coverage.js --format=text # Human-readable table
|
|
||||||
*
|
|
||||||
* Sample output (captured 2026-03-20):
|
|
||||||
* {
|
|
||||||
* "files": [
|
|
||||||
* { "file": "actuals.js", "total": 5, "logged": 5, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "bills.js", "total": 6, "logged": 6, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "config.js", "total": 2, "logged": 2, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "financing.js", "total": 6, "logged": 6, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "health.js", "total": 1, "logged": 0, "unlogged": 1, "coverage": 0 },
|
|
||||||
* { "file": "one-time-expenses.js", "total": 3, "logged": 3, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "paychecks.js", "total": 6, "logged": 6, "unlogged": 0, "coverage": 100 },
|
|
||||||
* { "file": "summary.js", "total": 2, "logged": 2, "unlogged": 0, "coverage": 100 }
|
|
||||||
* ],
|
|
||||||
* "app": {
|
|
||||||
* "has_request_timing_middleware": false,
|
|
||||||
* "has_error_handling_middleware": false,
|
|
||||||
* "middleware_count": 11
|
|
||||||
* },
|
|
||||||
* "aggregate": {
|
|
||||||
* "total_handlers": 31,
|
|
||||||
* "logged_handlers": 30,
|
|
||||||
* "unlogged_handlers": 1,
|
|
||||||
* "coverage_pct": 96.77
|
|
||||||
* }
|
|
||||||
* }
|
|
||||||
*/
|
|
||||||
|
|
||||||
'use strict';

const fs = require('fs');
const path = require('path');

// Locations scanned: every route module plus the Express app entry point.
const ROUTES_DIR = path.resolve(__dirname, '../server/src/routes');
const APP_FILE = path.resolve(__dirname, '../server/src/app.js');

// Regex patterns for route handler definitions.
// Matches: router.get/post/put/patch/delete( and app.get/post/put/patch/delete(
// NOTE: the /g flag makes lastIndex stateful; extractHandlerBodies resets it.
const ROUTE_DEF_RE = /\b(?:router|app)\.(get|post|put|patch|delete)\s*\(/g;

// Logging call patterns
const LOG_RE = /\bconsole\.(error|warn|log)\s*\(/;
|
|
||||||
|
|
||||||
/**
 * Extract individual route handler bodies from source.
 * Strategy: find each route definition, then walk forward counting
 * braces to find the closing of the outermost async/function callback.
 * @param {string} src - Full source text of a route file
 * @returns {string[]} One string per handler: the text between the first `{`
 *   inside the route call and the call's closing `)`.
 */
function extractHandlerBodies(src) {
  const handlers = [];
  let match;
  // ROUTE_DEF_RE is /g — reset lastIndex so repeated calls start from 0.
  ROUTE_DEF_RE.lastIndex = 0;

  while ((match = ROUTE_DEF_RE.exec(src)) !== null) {
    const startIdx = match.index;
    // Find the opening paren of the route call
    const parenOpen = src.indexOf('(', startIdx);
    if (parenOpen === -1) continue;

    // Walk from the paren open, tracking paren depth to find the matching close.
    // The handler callback body will be inside the outer parens.
    let depth = 0;
    let bodyStart = -1;
    let bodyEnd = -1;
    let inString = false;
    let stringChar = '';
    let i = parenOpen;

    while (i < src.length) {
      const ch = src[i];

      // Basic string tracking (skip contents of string literals)
      // NOTE(review): comments, regex literals, and template interpolation are
      // not tracked — a paren/brace inside those could skew the walk. Confirm
      // acceptable for the route files being scanned.
      if (!inString && (ch === '"' || ch === "'" || ch === '`')) {
        inString = true;
        stringChar = ch;
        i++;
        continue;
      }
      if (inString) {
        if (ch === '\\') { i += 2; continue; } // skip escape
        if (ch === stringChar) inString = false;
        i++;
        continue;
      }

      if (ch === '(') {
        depth++;
        if (depth === 1) {
          // This is the opening of the route call args
        }
      } else if (ch === ')') {
        depth--;
        if (depth === 0) {
          // Matching close of the route call itself — stop walking.
          bodyEnd = i;
          break;
        }
      } else if (ch === '{' && depth >= 1 && bodyStart === -1) {
        // First brace inside the outer parens — start of the handler body
        bodyStart = i;
      }

      i++;
    }

    // Record only handlers whose body was fully delimited.
    if (bodyStart !== -1 && bodyEnd !== -1) {
      handlers.push(src.slice(bodyStart, bodyEnd));
    }
  }

  return handlers;
}
|
|
||||||
|
|
||||||
/**
 * Analyse a single route file: count its handlers and how many of them
 * contain at least one console logging call.
 * @param {string} filePath - Absolute path to the route file
 * @returns {Object} { file, total, logged, unlogged, coverage } — coverage is
 *   a percentage rounded to 2 decimals, or null when the file has no handlers.
 */
function analyseRouteFile(filePath) {
  const source = fs.readFileSync(filePath, 'utf8');
  const bodies = extractHandlerBodies(source);

  // Count handler bodies containing a console.error/warn/log call.
  let loggedCount = 0;
  for (const body of bodies) {
    if (LOG_RE.test(body)) {
      loggedCount += 1;
    }
  }

  const total = bodies.length;
  const coverage =
    total === 0 ? null : Math.round((loggedCount / total) * 10000) / 100;

  return {
    file: path.basename(filePath),
    total,
    logged: loggedCount,
    unlogged: total - loggedCount,
    coverage,
  };
}
|
|
||||||
|
|
||||||
/**
 * Analyse app.js for middleware-level instrumentation.
 * @param {string} filePath - Absolute path to the Express app entry point
 * @returns {Object} Snake-case flags plus the app.use() call count.
 */
function analyseApp(filePath) {
  const source = fs.readFileSync(filePath, 'utf8');

  // Request timing heuristics: morgan required, Date.now() inside an
  // app.use() call, or a custom (req, res, next) middleware in a file that
  // also references a timing API.
  const usesMorgan = /\brequire\s*\(\s*['"]morgan['"]\s*\)/.test(source);
  const timesInsideUse = /app\.use\s*\(.*Date\.now\(\)/.test(source);
  const customTiming =
    /app\.use\s*\(.*req,\s*res,\s*next/.test(source) &&
    /Date\.now|performance\.now/.test(source);

  // Error handling middleware: app.use((err, req, res, next) => ...)
  const hasErrorHandling = /app\.use\s*\(\s*(?:\S+\s*,\s*)?\(\s*err\s*,/.test(source);

  // Count top-level app.use() calls (middleware registrations).
  const useCalls = source.match(/app\.use\s*\(/g);

  return {
    has_request_timing_middleware: usesMorgan || timesInsideUse || customTiming,
    has_error_handling_middleware: hasErrorHandling,
    middleware_count: useCalls === null ? 0 : useCalls.length,
  };
}
|
|
||||||
|
|
||||||
/**
 * CLI entry point: analyse every route file plus app.js, aggregate the
 * results, and print either JSON (default) or a text table (--format=text).
 */
function run() {
  const format = process.argv.includes('--format=text') ? 'text' : 'json';

  // Analyse all route files
  const routeFiles = fs.readdirSync(ROUTES_DIR)
    .filter(f => f.endsWith('.js'))
    .sort();

  const fileResults = routeFiles.map(f =>
    analyseRouteFile(path.join(ROUTES_DIR, f))
  );

  // Aggregate
  const totalHandlers = fileResults.reduce((s, r) => s + r.total, 0);
  const loggedHandlers = fileResults.reduce((s, r) => s + r.logged, 0);

  const aggregate = {
    total_handlers: totalHandlers,
    logged_handlers: loggedHandlers,
    unlogged_handlers: totalHandlers - loggedHandlers,
    // Percentage rounded to 2 decimals; null when there is nothing to measure.
    coverage_pct: totalHandlers === 0
      ? null
      : Math.round((loggedHandlers / totalHandlers) * 10000) / 100,
  };

  const appInfo = analyseApp(APP_FILE);

  const result = {
    files: fileResults,
    app: appInfo,
    aggregate,
  };

  if (format === 'json') {
    console.log(JSON.stringify(result, null, 2));
    return;
  }

  // Text table
  const COL_FILE = 28;
  const COL_TOTAL = 7;
  const COL_LOGGED = 8;
  const COL_COVER = 10;

  const pad = (s, n) => String(s).padEnd(n);
  const lpad = (s, n) => String(s).padStart(n);

  // Horizontal rule sized to the full table width.
  const hr = '-'.repeat(COL_FILE + COL_TOTAL + COL_LOGGED + COL_COVER + 6);

  console.log('\nMetrics Instrumentation Coverage\n');
  console.log(
    pad('Route File', COL_FILE) +
    lpad('Handlers', COL_TOTAL) +
    lpad('Logged', COL_LOGGED) +
    lpad('Coverage', COL_COVER)
  );
  console.log(hr);

  // One row per route file.
  for (const r of fileResults) {
    const cov = r.coverage === null ? 'N/A' : `${r.coverage}%`;
    console.log(
      pad(r.file, COL_FILE) +
      lpad(r.total, COL_TOTAL) +
      lpad(r.logged, COL_LOGGED) +
      lpad(cov, COL_COVER)
    );
  }

  console.log(hr);
  const aggCov = aggregate.coverage_pct === null ? 'N/A' : `${aggregate.coverage_pct}%`;
  console.log(
    pad('TOTAL', COL_FILE) +
    lpad(aggregate.total_handlers, COL_TOTAL) +
    lpad(aggregate.logged_handlers, COL_LOGGED) +
    lpad(aggCov, COL_COVER)
  );

  console.log('\napp.js middleware:');
  console.log(`  Request timing middleware : ${appInfo.has_request_timing_middleware}`);
  console.log(`  Error handling middleware : ${appInfo.has_error_handling_middleware}`);
  console.log(`  app.use() call count : ${appInfo.middleware_count}`);
  console.log('');
}

run();
|
|
||||||
1161
scripts/package-lock.json
generated
Normal file
1161
scripts/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
12
scripts/package.json
Normal file
12
scripts/package.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"name": "budget-scripts",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"scripts": {
|
||||||
|
"test": "vitest run",
|
||||||
|
"test:watch": "vitest",
|
||||||
|
"bus-factor": "node bus-factor.mjs"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"vitest": "^4.1.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
8
scripts/vitest.config.mjs
Normal file
8
scripts/vitest.config.mjs
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
import { defineConfig } from 'vitest/config';

// Node-environment test run with vitest globals (describe/it/expect) enabled.
const testOptions = {
  globals: true,
  environment: 'node',
};

export default defineConfig({ test: testOptions });
|
||||||
Reference in New Issue
Block a user