Compare commits
2 Commits
bus-factor
...
doc-drift/
| Author | SHA1 | Date | |
|---|---|---|---|
| 508ba06e69 | |||
| 5ca15118f8 |
@@ -77,6 +77,12 @@ cd client && npm run test:watch
|
|||||||
- Export pure functions (validators, formatters, etc.) for direct testing
|
- Export pure functions (validators, formatters, etc.) for direct testing
|
||||||
- Run `npm test` in both `server/` and `client/` before committing
|
- Run `npm test` in both `server/` and `client/` before committing
|
||||||
|
|
||||||
|
**Doc drift check:**
|
||||||
|
```bash
|
||||||
|
node scripts/doc-drift.js
|
||||||
|
```
|
||||||
|
Scans `CLAUDE.md` and `PRD.md` for verifiable code references (file paths, API routes, component names) and cross-checks each against the filesystem and source tree. Prints a PASS/FAIL report with doc name and line number. Exits non-zero on any failure — suitable for CI gating.
|
||||||
|
|
||||||
## Application Structure
|
## Application Structure
|
||||||
|
|
||||||
The default route `/` renders the paycheck-centric main view (`client/src/pages/PaycheckView.jsx`). It shows the current month's two paychecks side-by-side with bills, paid status, one-time expenses, and remaining balance. Month navigation (prev/next) fetches data via `GET /api/paychecks?year=&month=`.
|
The default route `/` renders the paycheck-centric main view (`client/src/pages/PaycheckView.jsx`). It shows the current month's two paychecks side-by-side with bills, paid status, one-time expenses, and remaining balance. Month navigation (prev/next) fetches data via `GET /api/paychecks?year=&month=`.
|
||||||
|
|||||||
@@ -1,213 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
/**
|
|
||||||
* Bus-Factor Analyzer
|
|
||||||
* Analyzes code ownership concentration by examining git commit history.
|
|
||||||
* Usage: node scripts/bus-factor.js [--json] [--min-commits N] [--threshold N] [--top N]
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { execSync } from 'child_process';
|
|
||||||
import { fileURLToPath } from 'url';
|
|
||||||
import path from 'path';
|
|
||||||
|
|
||||||
// --- Pure analysis functions (exported for testing) ---
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
|
|
||||||
* @param {string} rawLog - Output from git log --numstat
|
|
||||||
* @returns {Object} { [filePath]: { [author]: number } }
|
|
||||||
*/
|
|
||||||
/**
 * Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
 *
 * Expected input shape (per commit):
 *   commit <hash>
 *   Author: Name <email>
 *   ...
 *   <added>\t<deleted>\t<filename>
 *
 * Rename entries ("old => new" or "prefix/{old => new}/suffix") are attributed
 * to the NEW path only.
 *
 * @param {string} rawLog - Output from `git log --numstat`
 * @returns {Object} { [filePath]: { [author]: number } }
 */
export function parseGitLog(rawLog) {
  const ownership = {};
  let currentAuthor = null;

  for (const line of rawLog.split('\n')) {
    // Commit header resets the author until the next "Author:" line is seen.
    if (line.startsWith('commit ')) {
      currentAuthor = null;
      continue;
    }

    // Author line: "Author: Name <email>"
    const authorMatch = line.match(/^Author:\s+(.+?)\s+<[^>]+>/);
    if (authorMatch) {
      currentAuthor = authorMatch[1].trim();
      continue;
    }

    // Numstat line: "<added>\t<deleted>\t<filename>"
    if (currentAuthor && /^\d+\t\d+\t/.test(line)) {
      const parts = line.split('\t');
      if (parts.length < 3) continue;

      // Normalize rename syntax to the new path:
      //   "{old => new}/suffix"  -> "new/suffix"
      //   "old/path => new/path" -> "new/path"
      let filePath = parts[2];
      if (filePath.includes('{') && filePath.includes('=>')) {
        filePath = filePath
          .replace(/\{([^}]*?)\s*=>\s*([^}]*?)\}/g, '$2')
          .replace(/\s+/g, '')
          // BUGFIX: when one side of "{old => new}" is empty (git emits e.g.
          // "dir/{old => }/file" when a subdirectory level is removed), the
          // substitution leaves a double slash ("dir//file") that never
          // matches a real path. Collapse repeated slashes to match git's
          // actual resolved path.
          .replace(/\/{2,}/g, '/');
      } else if (filePath.includes(' => ')) {
        filePath = filePath.split(' => ')[1].trim();
      }

      filePath = filePath.trim();
      if (!filePath) continue;

      if (!ownership[filePath]) ownership[filePath] = {};
      ownership[filePath][currentAuthor] = (ownership[filePath][currentAuthor] || 0) + 1;
    }
  }

  return ownership;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Compute ownership metrics for a single file.
|
|
||||||
* @param {Object} authorCounts - { [author]: commitCount }
|
|
||||||
* @param {number} ownershipThreshold - min fraction to count toward bus-factor (default 0.1)
|
|
||||||
* @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
|
|
||||||
*/
|
|
||||||
/**
 * Compute ownership metrics for a single file.
 * @param {Object} authorCounts - { [author]: commitCount }
 * @param {number} ownershipThreshold - min fraction of commits an author must
 *   hold to count toward the bus-factor (default 0.1)
 * @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
 */
export function computeOwnership(authorCounts, ownershipThreshold = 0.1) {
  // Rank authors by commit count, highest first.
  const ranked = Object.entries(authorCounts).sort(([, a], [, b]) => b - a);

  let totalCommits = 0;
  for (const [, n] of ranked) totalCommits += n;

  const authors = ranked.map(([name, commits]) => ({
    name,
    commits,
    pct: totalCommits === 0 ? 0 : commits / totalCommits,
  }));

  // Bus-factor = number of authors at or above the ownership threshold.
  let busFactor = 0;
  for (const a of authors) {
    if (a.pct >= ownershipThreshold) busFactor += 1;
  }

  return {
    totalCommits,
    authors,
    busFactor,
    primaryOwner: authors.length > 0 ? authors[0] : null,
  };
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Score all files and return sorted results.
|
|
||||||
* @param {Object} ownership - Output from parseGitLog
|
|
||||||
* @param {Object} options
|
|
||||||
* @returns {Array} Sorted file entries with ownership metrics
|
|
||||||
*/
|
|
||||||
/**
 * Score all files and return them sorted by risk.
 * @param {Object} ownership - Output of parseGitLog
 * @param {Object} [options]
 * @param {number} [options.minCommits=2] - drop files with fewer total commits
 * @param {number} [options.ownershipThreshold=0.1] - forwarded to computeOwnership
 * @returns {Array} file entries with ownership metrics, highest risk first
 */
export function scoreFiles(ownership, { minCommits = 2, ownershipThreshold = 0.1 } = {}) {
  const scored = Object.entries(ownership)
    .map(([file, counts]) => ({ file, ...computeOwnership(counts, ownershipThreshold) }))
    .filter((entry) => entry.totalCommits >= minCommits);

  // Highest risk first: lowest bus-factor, ties broken by most commits.
  scored.sort((a, b) => (a.busFactor - b.busFactor) || (b.totalCommits - a.totalCommits));

  return scored;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Compute overall repo stats (weighted average bus-factor, high-risk count).
|
|
||||||
*/
|
|
||||||
/**
 * Compute overall repo stats from scored files.
 * @param {Array} scoredFiles - Output of scoreFiles
 * @returns {Object} { avgBusFactor, highRiskCount, totalFiles } where
 *   avgBusFactor is weighted by each file's commit count and highRiskCount
 *   is the number of files with bus-factor exactly 1.
 */
export function repoStats(scoredFiles) {
  if (scoredFiles.length === 0) {
    return { avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 };
  }

  let totalCommits = 0;
  let weightedBf = 0;
  let highRiskCount = 0;

  for (const f of scoredFiles) {
    totalCommits += f.totalCommits;
    weightedBf += f.busFactor * f.totalCommits;
    if (f.busFactor === 1) highRiskCount += 1;
  }

  return {
    avgBusFactor: totalCommits > 0 ? weightedBf / totalCommits : 0,
    highRiskCount,
    totalFiles: scoredFiles.length,
  };
}
|
|
||||||
|
|
||||||
// --- CLI ---
|
|
||||||
|
|
||||||
/**
 * Run `git log --numstat` over the tracked source directories and return the
 * raw text output.
 * @param {string} repoRoot - absolute path to the repository root
 * @returns {string} raw git log output
 */
function collectGitLog(repoRoot) {
  const watchedDirs = ['server/src', 'client/src', 'db/migrations'];
  const command = `git -C "${repoRoot}" log --numstat -- ${watchedDirs.join(' ')}`;
  // 50 MB buffer: full-history numstat output can be large.
  const output = execSync(command, { maxBuffer: 50 * 1024 * 1024 });
  return output.toString();
}
|
|
||||||
|
|
||||||
/**
 * Render a human-readable bus-factor report.
 * @param {Array} scoredFiles - Output of scoreFiles
 * @param {Object} stats - Output of repoStats
 * @param {number} [topN=10] - max number of high-risk files to list
 * @returns {string} multi-line report text
 */
function formatReport(scoredFiles, stats, topN = 10) {
  const out = [];
  const emit = (line = '') => out.push(line);

  emit();
  emit('=== Bus-Factor Analysis ===');
  emit();
  emit(`Files analyzed : ${stats.totalFiles}`);
  emit(`High-risk files: ${stats.highRiskCount} (bus-factor = 1)`);
  emit(`Avg bus-factor : ${stats.avgBusFactor.toFixed(2)} (weighted by commits)`);
  emit();

  const highRisk = scoredFiles.filter((f) => f.busFactor === 1);
  if (highRisk.length === 0) {
    emit('No high-risk files found.');
  } else {
    emit(`--- Top ${Math.min(topN, highRisk.length)} High-Risk Files (bus-factor = 1) ---`);
    emit();
    for (const f of highRisk.slice(0, topN)) {
      const owner = f.primaryOwner;
      emit(`  ${f.file}`);
      emit(`    commits: ${f.totalCommits}  owner: ${owner.name} (${(owner.pct * 100).toFixed(0)}%)`);
      if (f.authors.length > 1) {
        // Show at most two runner-up contributors.
        const others = f.authors
          .slice(1, 3)
          .map((a) => `${a.name} ${(a.pct * 100).toFixed(0)}%`)
          .join(', ');
        emit(`    others: ${others}`);
      }
      emit();
    }
  }

  emit('--- Author Contribution Summary ---');
  emit();

  // Aggregate per-author commit totals across every scored file.
  const authorTotals = {};
  for (const f of scoredFiles) {
    for (const a of f.authors) {
      authorTotals[a.name] = (authorTotals[a.name] || 0) + a.commits;
    }
  }

  const totalAll = Object.values(authorTotals).reduce((sum, n) => sum + n, 0);
  const ranked = Object.entries(authorTotals).sort((a, b) => b[1] - a[1]);
  for (const [name, commits] of ranked) {
    const pct = totalAll > 0 ? ((commits / totalAll) * 100).toFixed(1) : '0.0';
    emit(`  ${name.padEnd(30)} ${String(commits).padStart(5)} commits (${pct}%)`);
  }
  emit();

  return out.join('\n');
}
|
|
||||||
|
|
||||||
// Detect if running as main script (ESM equivalent of require.main === module)
|
|
||||||
// Detect if running as the main script (ESM analogue of require.main === module).
const isMain = process.argv[1] === fileURLToPath(import.meta.url);

if (isMain) {
  const args = process.argv.slice(2);
  const jsonMode = args.includes('--json');

  // Read the value following "--flag", or the fallback when absent.
  const readIntFlag = (flag, fallback) => {
    const i = args.indexOf(flag);
    return i === -1 ? fallback : parseInt(args[i + 1], 10);
  };
  const readFloatFlag = (flag, fallback) => {
    const i = args.indexOf(flag);
    return i === -1 ? fallback : parseFloat(args[i + 1]);
  };

  const minCommits = readIntFlag('--min-commits', 2);
  const threshold = readFloatFlag('--threshold', 0.1);
  const topN = readIntFlag('--top', 10);

  // scripts/ is one level below the repo root.
  const repoRoot = path.resolve(fileURLToPath(import.meta.url), '..', '..');

  let rawLog;
  try {
    rawLog = collectGitLog(repoRoot);
  } catch (err) {
    process.stderr.write(`Error running git log: ${err.message}\n`);
    process.exit(1);
  }

  const ownership = parseGitLog(rawLog);
  const scoredFiles = scoreFiles(ownership, { minCommits, ownershipThreshold: threshold });
  const stats = repoStats(scoredFiles);

  if (jsonMode) {
    process.stdout.write(JSON.stringify({ stats, files: scoredFiles }, null, 2) + '\n');
  } else {
    process.stdout.write(formatReport(scoredFiles, stats, topN));
  }
}
|
|
||||||
@@ -1,202 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { parseGitLog, computeOwnership, scoreFiles, repoStats } from './bus-factor.mjs';
|
|
||||||
|
|
||||||
// --- parseGitLog ---
|
|
||||||
|
|
||||||
describe('parseGitLog', () => {
  // Assemble a minimal fake `git log --numstat` entry; the parser only reads
  // the "commit", "Author:" and numstat lines.
  const fakeCommit = (author, numstatLines, hash = 'abc123') =>
    [
      `commit ${hash}`,
      `Author: ${author}`,
      'Date:   Mon Jan 1 00:00:00 2024',
      '',
      '    message',
      '',
      ...numstatLines,
      '',
    ].join('\n');

  it('parses a single commit with one file', () => {
    const parsed = parseGitLog(fakeCommit('Alice <alice@example.com>', ['5\t2\tserver/src/index.js']));
    expect(parsed['server/src/index.js']).toEqual({ Alice: 1 });
  });

  it('accumulates multiple commits by the same author', () => {
    const log =
      fakeCommit('Alice <alice@example.com>', ['3\t0\tserver/src/app.js'], 'aaa') +
      fakeCommit('Alice <alice@example.com>', ['1\t1\tserver/src/app.js'], 'bbb');
    expect(parseGitLog(log)['server/src/app.js']['Alice']).toBe(2);
  });

  it('tracks multiple authors for the same file', () => {
    const log =
      fakeCommit('Alice <alice@example.com>', ['2\t0\tclient/src/App.jsx'], 'aaa') +
      fakeCommit('Bob <bob@example.com>', ['1\t0\tclient/src/App.jsx'], 'bbb');
    const parsed = parseGitLog(log);
    expect(parsed['client/src/App.jsx']['Alice']).toBe(1);
    expect(parsed['client/src/App.jsx']['Bob']).toBe(1);
  });

  it('handles multiple files per commit', () => {
    const parsed = parseGitLog(
      fakeCommit('Alice <alice@example.com>', ['2\t0\tserver/src/a.js', '3\t1\tserver/src/b.js'])
    );
    expect(parsed['server/src/a.js']['Alice']).toBe(1);
    expect(parsed['server/src/b.js']['Alice']).toBe(1);
  });

  it('handles rename syntax (old => new)', () => {
    const parsed = parseGitLog(
      fakeCommit('Alice <alice@example.com>', ['2\t0\told/path.js => new/path.js'])
    );
    expect(parsed['new/path.js']).toBeDefined();
    expect(parsed['old/path.js']).toBeUndefined();
  });

  it('returns empty object for empty log', () => {
    expect(parseGitLog('')).toEqual({});
  });
});
|
|
||||||
|
|
||||||
// --- computeOwnership ---
|
|
||||||
|
|
||||||
describe('computeOwnership', () => {
  it('computes bus-factor of 1 for a solo author', () => {
    const metrics = computeOwnership({ Alice: 10 });
    expect(metrics.busFactor).toBe(1);
    expect(metrics.totalCommits).toBe(10);
    expect(metrics.primaryOwner.name).toBe('Alice');
    expect(metrics.primaryOwner.pct).toBe(1);
  });

  it('computes bus-factor of 2 when two authors each own >= 10%', () => {
    expect(computeOwnership({ Alice: 8, Bob: 2 }).busFactor).toBe(2);
  });

  it('does not count authors below the threshold', () => {
    // Bob holds 5% — under the default 10% threshold.
    expect(computeOwnership({ Alice: 19, Bob: 1 }).busFactor).toBe(1);
  });

  it('respects a custom ownership threshold', () => {
    // At a 20% threshold Bob's 10% no longer counts.
    expect(computeOwnership({ Alice: 9, Bob: 1 }, 0.2).busFactor).toBe(1);
  });

  it('sorts authors by commit count descending', () => {
    const { authors } = computeOwnership({ Alice: 3, Bob: 7, Carol: 5 });
    expect(authors.map((a) => a.name)).toEqual(['Bob', 'Carol', 'Alice']);
  });

  it('handles empty author counts gracefully', () => {
    const metrics = computeOwnership({});
    expect(metrics.totalCommits).toBe(0);
    expect(metrics.busFactor).toBe(0);
    expect(metrics.primaryOwner).toBeNull();
  });
});
|
|
||||||
|
|
||||||
// --- scoreFiles ---
|
|
||||||
|
|
||||||
describe('scoreFiles', () => {
  const ownership = {
    'server/src/risk.js': { Alice: 9, Bob: 1 },   // bus-factor 1 (Bob < 10%)
    'server/src/shared.js': { Alice: 5, Bob: 5 }, // bus-factor 2
    'server/src/tiny.js': { Alice: 1 },           // below minCommits=2, filtered
  };

  it('filters files below minCommits', () => {
    const scored = scoreFiles(ownership, { minCommits: 2 });
    expect(scored.find((f) => f.file === 'server/src/tiny.js')).toBeUndefined();
  });

  it('includes files at or above minCommits', () => {
    const names = scoreFiles(ownership, { minCommits: 2 }).map((f) => f.file);
    expect(names).toContain('server/src/risk.js');
    expect(names).toContain('server/src/shared.js');
  });

  it('sorts lowest bus-factor first', () => {
    const scored = scoreFiles(ownership, { minCommits: 2 });
    expect(scored.map((f) => f.file)).toEqual(['server/src/risk.js', 'server/src/shared.js']);
  });

  it('returns empty array for empty ownership', () => {
    expect(scoreFiles({}, {})).toEqual([]);
  });
});
|
|
||||||
|
|
||||||
// --- repoStats ---
|
|
||||||
|
|
||||||
describe('repoStats', () => {
  it('returns zeros for empty input', () => {
    expect(repoStats([])).toEqual({ avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 });
  });

  it('counts high-risk files (busFactor === 1)', () => {
    const stats = repoStats([
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 2, totalCommits: 5, authors: [] },
      { busFactor: 1, totalCommits: 3, authors: [] },
    ]);
    expect(stats.highRiskCount).toBe(2);
    expect(stats.totalFiles).toBe(3);
  });

  it('computes weighted average bus-factor', () => {
    const stats = repoStats([
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 3, totalCommits: 10, authors: [] },
    ]);
    // (1*10 + 3*10) / 20 = 2
    expect(stats.avgBusFactor).toBe(2);
  });
});
|
|
||||||
203
scripts/doc-drift.js
Normal file
203
scripts/doc-drift.js
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* doc-drift.js — detects documentation drift by cross-checking verifiable
|
||||||
|
* code references in CLAUDE.md and PRD.md against the filesystem and source tree.
|
||||||
|
*
|
||||||
|
* Usage: node scripts/doc-drift.js
|
||||||
|
* Exits non-zero if any drift is found.
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const { execSync } = require('child_process');
|
||||||
|
|
||||||
|
const ROOT = path.resolve(__dirname, '..');
|
||||||
|
const DOCS = ['CLAUDE.md', 'PRD.md'].map(f => path.join(ROOT, f));
|
||||||
|
|
||||||
|
// ── Result tracking ──────────────────────────────────────────────────────────

// Every checked reference lands here; the report section at the bottom of the
// file iterates this list and derives the process exit code from it.
const results = [];

/**
 * Append one check result to the results list.
 * @param {string} doc - absolute path of the doc being checked
 * @param {number} line - 1-based line number within the doc
 * @param {string} kind - reference category ('file-path' | 'component' | 'api-route')
 * @param {string} ref - the reference text that was checked
 * @param {boolean} pass - whether the reference resolved
 * @param {string} reason - human-readable explanation for the report
 */
function record(doc, line, kind, ref, pass, reason) {
  const entry = { doc: path.basename(doc), line, kind, ref, pass, reason };
  results.push(entry);
}
|
||||||
|
|
||||||
|
// ── Extraction helpers ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/** Extract the contents of every `backtick span` on a line (may be several). */
function backtickSpans(line) {
  const found = [];
  const spanRe = /`([^`]+)`/g;
  for (let match = spanRe.exec(line); match !== null; match = spanRe.exec(line)) {
    found.push(match[1]);
  }
  return found;
}
|
||||||
|
|
||||||
|
/**
 * Heuristic: does a backtick span look like a project file/dir path that can
 * be verified against the filesystem?
 */
function isFilePath(span) {
  // Needs a path separator and a recognised top-level project prefix.
  if (!/[/\\]/.test(span)) return false;
  if (!/^(client|server|db|scripts|docker-compose)/.test(span)) return false;
  // Reject anything that looks like a shell command, assignment, or URL.
  if (/\s/.test(span)) return false;
  if (span.includes('=')) return false;
  return !span.startsWith('http');
}
|
||||||
|
|
||||||
|
/** True for a bare component file name like `App.jsx` (no path separators). */
function isJsxRef(span) {
  if (/[/]/.test(span)) return false; // path-qualified refs are handled as file paths
  return /\w+\.jsx$/.test(span);
}
|
||||||
|
|
||||||
|
/** Extract HTTP API route references like `GET /api/paychecks` from a line. */
function extractApiRoutes(line) {
  const routeRe = /\b(GET|POST|PUT|DELETE|PATCH)\s+(\/api\/[^\s,`'")\]]+)/g;
  const found = [];
  let hit;
  while ((hit = routeRe.exec(line)) !== null) {
    found.push({ method: hit[1], path: hit[2] });
  }
  return found;
}
|
||||||
|
|
||||||
|
// ── Verification helpers ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/** True if relPath exists (file or directory) under the project root. */
function fileExists(relPath) {
  const absolute = path.join(ROOT, relPath);
  return fs.existsSync(absolute);
}
|
||||||
|
|
||||||
|
/**
 * Check whether an API route path appears in server/src/routes/.
 * Greps route files for either the full path or the fragment after /api,
 * both matched as literals.
 * @param {string} routePath - e.g. "/api/paychecks?year=&month="
 * @returns {boolean} true if some route file mentions the path
 */
function apiRouteExists(routePath) {
  // Strip query-string placeholders like ?year=&month=.
  // (Removed dead code: the original chained `.replace(/:id/g, ':id')`,
  // which replaced ':id' with itself — a no-op.)
  const clean = routePath.replace(/\?.*$/, '');
  // Look for the path minus the leading /api (route files mount under /api).
  const fragment = clean.replace(/^\/api/, '');
  // Escape regex metacharacters so the route text is matched literally and
  // cannot alter the `grep -E` pattern.
  const escapeRe = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  try {
    const out = execSync(
      `grep -rE --include="*.js" -l "${escapeRe(clean)}|${escapeRe(fragment)}" "${path.join(ROOT, 'server/src/routes')}"`,
      { stdio: ['pipe', 'pipe', 'pipe'] }
    ).toString().trim();
    return out.length > 0;
  } catch {
    // grep exits non-zero when nothing matches (or the directory is missing).
    return false;
  }
}
|
||||||
|
|
||||||
|
/**
 * Check whether a bare component file name (e.g. "PaycheckView.jsx") exists
 * anywhere under client/src/.
 * @param {string} name - bare file name to look for
 * @returns {boolean} true if at least one matching file is found
 */
function jsxComponentExists(name) {
  const searchRoot = path.join(ROOT, 'client/src');
  try {
    const hits = execSync(`find "${searchRoot}" -name "${name}" -type f`, {
      stdio: ['pipe', 'pipe', 'pipe'],
    })
      .toString()
      .trim();
    return hits.length > 0;
  } catch {
    return false;
  }
}
|
||||||
|
|
||||||
|
// ── Main ─────────────────────────────────────────────────────────────────────

for (const docPath of DOCS) {
  if (!fs.existsSync(docPath)) {
    console.error(`WARN: doc not found: ${docPath}`);
    continue;
  }

  const docLines = fs.readFileSync(docPath, 'utf8').split('\n');

  docLines.forEach((rawLine, idx) => {
    const lineNo = idx + 1;

    // 1. Backtick spans: file paths first, then bare component names.
    //    A span matched as a file path is never also checked as a component.
    for (const span of backtickSpans(rawLine)) {
      if (isFilePath(span)) {
        const exists = fileExists(span);
        record(
          docPath,
          lineNo,
          'file-path',
          span,
          exists,
          exists ? 'found on filesystem' : `not found: ${span}`
        );
      } else if (isJsxRef(span)) {
        const exists = jsxComponentExists(span);
        record(
          docPath,
          lineNo,
          'component',
          span,
          exists,
          exists ? 'found under client/src' : `no file named ${span} in client/src`
        );
      }
    }

    // 2. API routes (inside or outside backticks).
    for (const { method, path: routePath } of extractApiRoutes(rawLine)) {
      const exists = apiRouteExists(routePath);
      record(
        docPath,
        lineNo,
        'api-route',
        `${method} ${routePath}`,
        exists,
        exists ? 'found in server/src/routes' : 'route not found in server/src/routes'
      );
    }
  });
}
|
||||||
|
|
||||||
|
// ── Report ───────────────────────────────────────────────────────────────────

const GREEN = '\x1b[32m';
const RED = '\x1b[31m';
const YELLOW = '\x1b[33m';
const RESET = '\x1b[0m';

// Column widths sized to the longest value seen (with sensible minimums).
const padDoc = Math.max(...results.map((r) => r.doc.length), 9);
const padKind = Math.max(...results.map((r) => r.kind.length), 9);
const padRef = Math.min(60, Math.max(...results.map((r) => r.ref.length), 10));

const header = [
  'STATUS'.padEnd(6),
  'DOC'.padEnd(padDoc),
  'LINE'.padStart(4),
  'KIND'.padEnd(padKind),
  'REFERENCE',
].join(' ');

console.log('\n' + header);
console.log('─'.repeat(header.length + 10));

let failures = 0;

for (const r of results) {
  const status = r.pass ? 'PASS' : 'FAIL';
  const color = r.pass ? GREEN : RED;
  // Truncate long references with an ellipsis so columns stay aligned.
  const ref = r.ref.length > padRef ? `${r.ref.slice(0, padRef - 1)}…` : r.ref;
  const row = [
    color + status.padEnd(6) + RESET,
    r.doc.padEnd(padDoc),
    String(r.line).padStart(4),
    r.kind.padEnd(padKind),
    ref,
  ].join(' ');
  console.log(row);
  if (!r.pass) {
    console.log(`  ${YELLOW}↳ ${r.reason}${RESET}`);
    failures += 1;
  }
}

console.log('─'.repeat(header.length + 10));
console.log(`\n${results.length} references checked — ${failures} failure(s)\n`);

// Non-zero exit on any drift — suitable for CI gating.
process.exit(failures > 0 ? 1 : 0);
|
||||||
1161
scripts/package-lock.json
generated
1161
scripts/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,12 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "budget-scripts",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"scripts": {
|
|
||||||
"test": "vitest run",
|
|
||||||
"test:watch": "vitest",
|
|
||||||
"bus-factor": "node bus-factor.mjs"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"vitest": "^4.1.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
import { defineConfig } from 'vitest/config';

// Node environment with global test APIs (describe/it/expect) enabled.
const testOptions = {
  globals: true,
  environment: 'node',
};

export default defineConfig({ test: testOptions });
|
|
||||||
Reference in New Issue
Block a user