Compare commits
3 Commits
bus-factor
...
perf-regre
| Author | SHA1 | Date |
|---|---|---|
| 239c8596a7 | | |
| 76a9559514 | | |
| 0a1e3666ef | | |
13
CLAUDE.md
13
CLAUDE.md
@@ -94,3 +94,16 @@ The default route `/` renders the paycheck-centric main view (`client/src/pages/
|
||||
**Financing:** `GET/POST /api/financing`, `PUT/DELETE /api/financing/:id`, `PATCH /api/financing-payments/:id/paid`. Plans track a total amount, payoff due date, and `start_date`. Payment per period is auto-calculated as `(remaining balance) / (remaining periods)`. Split plans (`assigned_paycheck = null`) divide each period's payment across both paychecks. Plans auto-close when fully paid. Financing payments are included in the paycheck remaining balance. `start_date` prevents a plan from appearing on paycheck months before it was created — both virtual previews and `generate` respect this guard.
|
||||
|
||||
**Migrations:** SQL files in `db/migrations/` are applied in filename order on server startup. Add new migrations as `00N_description.sql` — they run once and are tracked in the `migrations` table.
|
||||
|
||||
## Performance Tooling
|
||||
|
||||
**Timing middleware** (`server/src/middleware/timing.js`): Registered early in `app.js`. Logs every request's method, path, status code, and duration. Emits a `[SLOW]` warning for responses exceeding 200 ms.
|
||||
|
||||
**Benchmark script** (`scripts/perf-benchmark.js`): Hits `GET /api/paychecks`, `GET /api/financing`, and `GET /api/summary/annual` five times each and reports min/mean/max latency. Exits non-zero if any mean exceeds the threshold (default 500 ms, override via `SLOW_THRESHOLD_MS` env var). Target server URL defaults to `http://localhost:3001` (override via `BENCHMARK_URL`).
|
||||
|
||||
```bash
|
||||
cd server && npm run perf # run against localhost:3001
|
||||
BENCHMARK_URL=http://localhost:3000 npm run perf
|
||||
```
|
||||
|
||||
**Performance indexes** (`db/migrations/005_performance_indexes.sql`): Adds indexes on `paychecks(period_year, period_month)`, `paycheck_bills(paycheck_id)`, `actuals(paycheck_id)`, `one_time_expenses(paycheck_id)`, `financing_payments(plan_id)`, and `financing_plans(active)` — applied automatically on server startup.
|
||||
|
||||
7
db/migrations/005_performance_indexes.sql
Normal file
7
db/migrations/005_performance_indexes.sql
Normal file
@@ -0,0 +1,7 @@
|
||||
-- Performance indexes for high-traffic query patterns
|
||||
CREATE INDEX IF NOT EXISTS idx_paychecks_period ON paychecks(period_year, period_month);
|
||||
CREATE INDEX IF NOT EXISTS idx_paycheck_bills_paycheck_id ON paycheck_bills(paycheck_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_actuals_paycheck_id ON actuals(paycheck_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_one_time_expenses_paycheck_id ON one_time_expenses(paycheck_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_financing_payments_plan_id ON financing_payments(plan_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_financing_plans_active ON financing_plans(active) WHERE active = true;
|
||||
@@ -1,213 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Bus-Factor Analyzer
|
||||
* Analyzes code ownership concentration by examining git commit history.
|
||||
* Usage: node scripts/bus-factor.js [--json] [--min-commits N] [--threshold N] [--top N]
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import { fileURLToPath } from 'url';
|
||||
import path from 'path';
|
||||
|
||||
// --- Pure analysis functions (exported for testing) ---
|
||||
|
||||
/**
|
||||
* Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
|
||||
* @param {string} rawLog - Output from git log --numstat
|
||||
* @returns {Object} { [filePath]: { [author]: number } }
|
||||
*/
|
||||
/**
 * Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
 *
 * Fix: binary files appear in numstat output as "-\t-\t<file>" (no line counts);
 * the previous pattern only matched numeric counts, so commits touching binary
 * assets were silently dropped from the ownership map.
 *
 * @param {string} rawLog - Output from `git log --numstat`
 * @returns {Object} { [filePath]: { [author]: number } }
 */
export function parseGitLog(rawLog) {
  const ownership = {};
  const lines = rawLog.split('\n');
  let currentAuthor = null;

  for (const line of lines) {
    // Commit header line: "commit <hash>" — reset until the next Author line.
    if (line.startsWith('commit ')) {
      currentAuthor = null;
      continue;
    }

    // Author line: "Author: Name <email>"
    const authorMatch = line.match(/^Author:\s+(.+?)\s+<[^>]+>/);
    if (authorMatch) {
      currentAuthor = authorMatch[1].trim();
      continue;
    }

    // Numstat line: "<added>\t<deleted>\t<filename>".
    // Binary files report "-" in place of the counts; accept both forms.
    if (currentAuthor && /^(?:\d+|-)\t(?:\d+|-)\t/.test(line)) {
      const parts = line.split('\t');
      if (parts.length < 3) continue;

      // Handle rename syntax: "old/path => new/path" or "{old => new}/suffix".
      let filePath = parts[2];
      if (filePath.includes('{') && filePath.includes('=>')) {
        filePath = filePath.replace(/\{([^}]*?)\s*=>\s*([^}]*?)\}/g, '$2').replace(/\s+/g, '');
      } else if (filePath.includes(' => ')) {
        filePath = filePath.split(' => ')[1].trim();
      }

      filePath = filePath.trim();
      if (!filePath) continue;

      if (!ownership[filePath]) ownership[filePath] = {};
      ownership[filePath][currentAuthor] = (ownership[filePath][currentAuthor] || 0) + 1;
    }
  }

  return ownership;
}
|
||||
|
||||
/**
|
||||
* Compute ownership metrics for a single file.
|
||||
* @param {Object} authorCounts - { [author]: commitCount }
|
||||
* @param {number} ownershipThreshold - min fraction to count toward bus-factor (default 0.1)
|
||||
* @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
|
||||
*/
|
||||
/**
 * Compute ownership metrics for a single file.
 * @param {Object} authorCounts - { [author]: commitCount }
 * @param {number} ownershipThreshold - minimum fraction of total commits an
 *   author must hold to count toward the bus-factor (default 0.1)
 * @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
 */
export function computeOwnership(authorCounts, ownershipThreshold = 0.1) {
  // Rank authors by commit count, highest first.
  const ranked = Object.entries(authorCounts).sort(([, x], [, y]) => y - x);

  let totalCommits = 0;
  for (const [, count] of ranked) totalCommits += count;

  const authors = ranked.map(([name, commits]) => {
    const pct = totalCommits > 0 ? commits / totalCommits : 0;
    return { name, commits, pct };
  });

  return {
    totalCommits,
    authors,
    // Only authors holding at least the threshold share count as "owners".
    busFactor: authors.filter((a) => a.pct >= ownershipThreshold).length,
    primaryOwner: authors.length > 0 ? authors[0] : null,
  };
}
|
||||
|
||||
/**
|
||||
* Score all files and return sorted results.
|
||||
* @param {Object} ownership - Output from parseGitLog
|
||||
* @param {Object} options
|
||||
* @returns {Array} Sorted file entries with ownership metrics
|
||||
*/
|
||||
/**
 * Score every file's ownership and return entries sorted by risk:
 * lowest bus-factor first, then most commits (highest risk first).
 * @param {Object} ownership - Output from parseGitLog
 * @param {Object} options - { minCommits, ownershipThreshold }
 * @returns {Array} Sorted file entries with ownership metrics
 */
export function scoreFiles(ownership, { minCommits = 2, ownershipThreshold = 0.1 } = {}) {
  const scored = Object.entries(ownership)
    .map(([file, authorCounts]) => ({
      file,
      ...computeOwnership(authorCounts, ownershipThreshold),
    }))
    // Files with too little history are noise — skip them.
    .filter((entry) => entry.totalCommits >= minCommits);

  scored.sort(
    (a, b) => (a.busFactor - b.busFactor) || (b.totalCommits - a.totalCommits),
  );

  return scored;
}
|
||||
|
||||
/**
|
||||
* Compute overall repo stats (weighted average bus-factor, high-risk count).
|
||||
*/
|
||||
/**
 * Aggregate repo-level stats: commit-weighted average bus-factor and the
 * count of files effectively owned by a single author.
 * @param {Array} scoredFiles - Output of scoreFiles
 * @returns {Object} { avgBusFactor, highRiskCount, totalFiles }
 */
export function repoStats(scoredFiles) {
  if (scoredFiles.length === 0) {
    return { avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 };
  }

  let totalCommits = 0;
  let weightedBf = 0;
  let highRiskCount = 0;
  for (const entry of scoredFiles) {
    totalCommits += entry.totalCommits;
    // Weight each file's bus-factor by its activity level.
    weightedBf += entry.busFactor * entry.totalCommits;
    if (entry.busFactor === 1) highRiskCount += 1;
  }

  return {
    avgBusFactor: totalCommits > 0 ? weightedBf / totalCommits : 0,
    highRiskCount,
    totalFiles: scoredFiles.length,
  };
}
|
||||
|
||||
// --- CLI ---
|
||||
|
||||
/**
 * Run `git log --numstat` over the tracked source directories and return the
 * raw text output.
 * @param {string} repoRoot - Absolute path to the repository root
 * @returns {string} Raw git log text
 */
function collectGitLog(repoRoot) {
  const watchedDirs = ['server/src', 'client/src', 'db/migrations'].join(' ');
  const output = execSync(`git -C "${repoRoot}" log --numstat -- ${watchedDirs}`, {
    // Large histories can exceed execSync's 1 MB default buffer.
    maxBuffer: 50 * 1024 * 1024,
  });
  return output.toString();
}
|
||||
|
||||
/**
 * Render a human-readable report: summary stats, the riskiest files, and a
 * per-author contribution table.
 * @param {Array} scoredFiles - Output of scoreFiles
 * @param {Object} stats - Output of repoStats
 * @param {number} topN - Maximum number of high-risk files to list
 * @returns {string} Multi-line report text
 */
function formatReport(scoredFiles, stats, topN = 10) {
  const out = [];
  const emit = (line = '') => out.push(line);

  emit();
  emit('=== Bus-Factor Analysis ===');
  emit();
  emit(`Files analyzed : ${stats.totalFiles}`);
  emit(`High-risk files: ${stats.highRiskCount} (bus-factor = 1)`);
  emit(`Avg bus-factor : ${stats.avgBusFactor.toFixed(2)} (weighted by commits)`);
  emit();

  // Section 1: files whose knowledge is concentrated in one person.
  const highRisk = scoredFiles.filter((entry) => entry.busFactor === 1);
  if (highRisk.length === 0) {
    emit('No high-risk files found.');
  } else {
    emit(`--- Top ${Math.min(topN, highRisk.length)} High-Risk Files (bus-factor = 1) ---`);
    emit();
    for (const entry of highRisk.slice(0, topN)) {
      const owner = entry.primaryOwner;
      emit(`  ${entry.file}`);
      emit(`    commits: ${entry.totalCommits}  owner: ${owner.name} (${(owner.pct * 100).toFixed(0)}%)`);
      if (entry.authors.length > 1) {
        const runnersUp = entry.authors
          .slice(1, 3)
          .map((a) => `${a.name} ${(a.pct * 100).toFixed(0)}%`)
          .join(', ');
        emit(`    others: ${runnersUp}`);
      }
      emit();
    }
  }

  // Section 2: total commits per author across all scored files.
  emit('--- Author Contribution Summary ---');
  emit();
  const authorTotals = {};
  for (const entry of scoredFiles) {
    for (const a of entry.authors) {
      authorTotals[a.name] = (authorTotals[a.name] || 0) + a.commits;
    }
  }
  const totalAll = Object.values(authorTotals).reduce((s, n) => s + n, 0);
  const ranked = Object.entries(authorTotals).sort((a, b) => b[1] - a[1]);
  for (const [name, commits] of ranked) {
    const pct = totalAll > 0 ? ((commits / totalAll) * 100).toFixed(1) : '0.0';
    emit(`  ${name.padEnd(30)} ${String(commits).padStart(5)} commits (${pct}%)`);
  }
  emit();

  return out.join('\n');
}
|
||||
|
||||
// Detect direct execution (ESM equivalent of `require.main === module`).
const isMain = process.argv[1] === fileURLToPath(import.meta.url);

if (isMain) {
  const args = process.argv.slice(2);
  const jsonMode = args.includes('--json');

  // Read the value following a flag, or fall back to a default.
  const flagValue = (flag, parse, fallback) => {
    const idx = args.indexOf(flag);
    return idx !== -1 ? parse(args[idx + 1]) : fallback;
  };

  const minCommits = flagValue('--min-commits', (v) => parseInt(v, 10), 2);
  const threshold = flagValue('--threshold', (v) => parseFloat(v), 0.1);
  const topN = flagValue('--top', (v) => parseInt(v, 10), 10);

  // scripts/ lives one level below the repo root.
  const repoRoot = path.resolve(fileURLToPath(import.meta.url), '..', '..');

  let rawLog;
  try {
    rawLog = collectGitLog(repoRoot);
  } catch (err) {
    process.stderr.write(`Error running git log: ${err.message}\n`);
    process.exit(1);
  }

  const ownership = parseGitLog(rawLog);
  const scoredFiles = scoreFiles(ownership, { minCommits, ownershipThreshold: threshold });
  const stats = repoStats(scoredFiles);

  if (jsonMode) {
    process.stdout.write(JSON.stringify({ stats, files: scoredFiles }, null, 2) + '\n');
  } else {
    process.stdout.write(formatReport(scoredFiles, stats, topN));
  }
}
|
||||
@@ -1,202 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseGitLog, computeOwnership, scoreFiles, repoStats } from './bus-factor.mjs';
|
||||
|
||||
// --- parseGitLog ---
|
||||
|
||||
// Parsing of raw `git log --numstat` output into { file: { author: count } }.
describe('parseGitLog', () => {
  it('parses a single commit with one file', () => {
    // Minimal log: one commit header, one author, one numstat row.
    const raw = `commit abc123
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Initial commit

5\t2\tserver/src/index.js
`;
    const result = parseGitLog(raw);
    expect(result['server/src/index.js']).toEqual({ Alice: 1 });
  });

  it('accumulates multiple commits by the same author', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

First

3\t0\tserver/src/app.js

commit bbb
Author: Alice <alice@example.com>
Date: Tue Jan 2 00:00:00 2024

Second

1\t1\tserver/src/app.js
`;
    const result = parseGitLog(raw);
    // Two commits touching the same file -> count of 2 for Alice.
    expect(result['server/src/app.js']['Alice']).toBe(2);
  });

  it('tracks multiple authors for the same file', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Alice commit

2\t0\tclient/src/App.jsx

commit bbb
Author: Bob <bob@example.com>
Date: Tue Jan 2 00:00:00 2024

Bob commit

1\t0\tclient/src/App.jsx
`;
    const result = parseGitLog(raw);
    expect(result['client/src/App.jsx']['Alice']).toBe(1);
    expect(result['client/src/App.jsx']['Bob']).toBe(1);
  });

  it('handles multiple files per commit', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Multi-file commit

2\t0\tserver/src/a.js
3\t1\tserver/src/b.js
`;
    const result = parseGitLog(raw);
    expect(result['server/src/a.js']['Alice']).toBe(1);
    expect(result['server/src/b.js']['Alice']).toBe(1);
  });

  it('handles rename syntax (old => new)', () => {
    const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Rename

2\t0\told/path.js => new/path.js
`;
    const result = parseGitLog(raw);
    // Only the post-rename path should be attributed.
    expect(result['new/path.js']).toBeDefined();
    expect(result['old/path.js']).toBeUndefined();
  });

  it('returns empty object for empty log', () => {
    expect(parseGitLog('')).toEqual({});
  });
});
|
||||
|
||||
// --- computeOwnership ---
|
||||
|
||||
// Ownership metrics for a single file's author -> count map.
describe('computeOwnership', () => {
  it('computes bus-factor of 1 for a solo author', () => {
    const result = computeOwnership({ Alice: 10 });
    expect(result.busFactor).toBe(1);
    expect(result.totalCommits).toBe(10);
    expect(result.primaryOwner.name).toBe('Alice');
    expect(result.primaryOwner.pct).toBe(1);
  });

  it('computes bus-factor of 2 when two authors each own >= 10%', () => {
    const result = computeOwnership({ Alice: 8, Bob: 2 });
    expect(result.busFactor).toBe(2);
  });

  it('does not count authors below the threshold', () => {
    // Bob has 5% — below default 10% threshold
    const result = computeOwnership({ Alice: 19, Bob: 1 });
    expect(result.busFactor).toBe(1);
  });

  it('respects a custom ownership threshold', () => {
    // With 20% threshold, Bob (10%) doesn't count
    const result = computeOwnership({ Alice: 9, Bob: 1 }, 0.2);
    expect(result.busFactor).toBe(1);
  });

  it('sorts authors by commit count descending', () => {
    const result = computeOwnership({ Alice: 3, Bob: 7, Carol: 5 });
    expect(result.authors[0].name).toBe('Bob');
    expect(result.authors[1].name).toBe('Carol');
    expect(result.authors[2].name).toBe('Alice');
  });

  it('handles empty author counts gracefully', () => {
    // No authors: zero commits, zero bus-factor, no primary owner.
    const result = computeOwnership({});
    expect(result.totalCommits).toBe(0);
    expect(result.busFactor).toBe(0);
    expect(result.primaryOwner).toBeNull();
  });
});
|
||||
|
||||
// --- scoreFiles ---
|
||||
|
||||
// File scoring: filtering by minCommits and risk-first sort order.
describe('scoreFiles', () => {
  // Shared fixture covering all three cases: high-risk, shared, and filtered.
  const ownership = {
    'server/src/risk.js': { Alice: 9, Bob: 1 }, // bus-factor 1 (Bob < 10%)
    'server/src/shared.js': { Alice: 5, Bob: 5 }, // bus-factor 2
    'server/src/tiny.js': { Alice: 1 }, // below minCommits=2, filtered
  };

  it('filters files below minCommits', () => {
    const results = scoreFiles(ownership, { minCommits: 2 });
    expect(results.find(f => f.file === 'server/src/tiny.js')).toBeUndefined();
  });

  it('includes files at or above minCommits', () => {
    const results = scoreFiles(ownership, { minCommits: 2 });
    const files = results.map(f => f.file);
    expect(files).toContain('server/src/risk.js');
    expect(files).toContain('server/src/shared.js');
  });

  it('sorts lowest bus-factor first', () => {
    // risk.js (bus-factor 1) must rank above shared.js (bus-factor 2).
    const results = scoreFiles(ownership, { minCommits: 2 });
    expect(results[0].file).toBe('server/src/risk.js');
    expect(results[1].file).toBe('server/src/shared.js');
  });

  it('returns empty array for empty ownership', () => {
    expect(scoreFiles({}, {})).toEqual([]);
  });
});
|
||||
|
||||
// --- repoStats ---
|
||||
|
||||
// Repo-level aggregation over scored files.
describe('repoStats', () => {
  it('returns zeros for empty input', () => {
    const stats = repoStats([]);
    expect(stats.avgBusFactor).toBe(0);
    expect(stats.highRiskCount).toBe(0);
    expect(stats.totalFiles).toBe(0);
  });

  it('counts high-risk files (busFactor === 1)', () => {
    const files = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 2, totalCommits: 5, authors: [] },
      { busFactor: 1, totalCommits: 3, authors: [] },
    ];
    const stats = repoStats(files);
    expect(stats.highRiskCount).toBe(2);
    expect(stats.totalFiles).toBe(3);
  });

  it('computes weighted average bus-factor', () => {
    const files = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 3, totalCommits: 10, authors: [] },
    ];
    const stats = repoStats(files);
    // (1*10 + 3*10) / 20 = 2
    expect(stats.avgBusFactor).toBe(2);
  });
});
|
||||
1161
scripts/package-lock.json
generated
1161
scripts/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "budget-scripts",
|
||||
"version": "1.0.0",
|
||||
"scripts": {
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest",
|
||||
"bus-factor": "node bus-factor.mjs"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vitest": "^4.1.0"
|
||||
}
|
||||
}
|
||||
62
scripts/perf-benchmark.js
Normal file
62
scripts/perf-benchmark.js
Normal file
@@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
// Target server and pass/fail knobs, overridable via environment variables.
const BASE_URL = process.env.BENCHMARK_URL || 'http://localhost:3001';
const ITERATIONS = 5;
const MEAN_THRESHOLD_MS = parseInt(process.env.SLOW_THRESHOLD_MS || '500', 10);

// Endpoints exercised by the benchmark; the paychecks/summary paths pin the
// current year/month so results match what the UI queries by default.
const ENDPOINTS = [
  {
    label: 'GET /api/paychecks',
    path: `/api/paychecks?year=${new Date().getFullYear()}&month=${new Date().getMonth() + 1}`,
  },
  { label: 'GET /api/financing', path: '/api/financing' },
  {
    label: 'GET /api/summary/annual',
    path: `/api/summary/annual?year=${new Date().getFullYear()}`,
  },
];

/**
 * Time ITERATIONS sequential requests against one endpoint.
 * Non-2xx responses are warned about but still timed.
 * @param {{label: string, path: string}} endpoint
 * @returns {Promise<{min: number, mean: number, max: number}>} latency stats in ms
 */
async function measureEndpoint(endpoint) {
  const samples = [];
  for (let i = 0; i < ITERATIONS; i++) {
    const startedAt = Date.now();
    const res = await fetch(`${BASE_URL}${endpoint.path}`);
    const elapsed = Date.now() - startedAt;
    if (!res.ok) {
      console.warn(`  [warn] ${endpoint.label} returned HTTP ${res.status}`);
    }
    samples.push(elapsed);
  }
  const total = samples.reduce((sum, t) => sum + t, 0);
  return {
    min: Math.min(...samples),
    mean: Math.round(total / samples.length),
    max: Math.max(...samples),
  };
}
||||
|
||||
// Entry point: benchmark every endpoint, print per-endpoint stats, and exit
// non-zero when any mean exceeds the threshold or a request errors — so CI
// can gate on the result.
(async () => {
  console.log(`Benchmarking ${BASE_URL} (${ITERATIONS} iterations each, threshold ${MEAN_THRESHOLD_MS}ms)\n`);

  let failed = false;

  for (const endpoint of ENDPOINTS) {
    let stats;
    try {
      stats = await measureEndpoint(endpoint);
    } catch (err) {
      // Connection refused, DNS failure, etc. — count as a failure and move on.
      console.error(`  [error] ${endpoint.label}: ${err.message}`);
      failed = true;
      continue;
    }

    const isSlow = stats.mean >= MEAN_THRESHOLD_MS;
    if (isSlow) failed = true;

    console.log(`${endpoint.label}`);
    console.log(`  min=${stats.min}ms  mean=${stats.mean}ms  max=${stats.max}ms${isSlow ? ' *** SLOW ***' : ''}`);
  }

  console.log('');
  if (failed) {
    console.error('FAIL: one or more endpoints exceeded the threshold or errored.');
    process.exit(1);
  } else {
    console.log('PASS: all endpoints within threshold.');
  }
})();
|
||||
@@ -1,8 +0,0 @@
|
||||
import { defineConfig } from 'vitest/config';

// Vitest settings for the scripts package: plain Node environment with
// global test APIs (describe/it/expect) enabled.
export default defineConfig({
  test: {
    globals: true,
    environment: 'node',
  },
});
|
||||
@@ -6,7 +6,8 @@
|
||||
"start": "node src/index.js",
|
||||
"dev": "nodemon src/index.js",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
"test:watch": "vitest",
|
||||
"perf": "node ../scripts/perf-benchmark.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"cors": "^2.8.5",
|
||||
|
||||
98
server/src/__tests__/timing.middleware.test.js
Normal file
98
server/src/__tests__/timing.middleware.test.js
Normal file
@@ -0,0 +1,98 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';

// The middleware is CommonJS (`module.exports`); use a default import, which
// Vitest interops with CJS exports. A bare `require()` is not defined inside
// an ES-module test file and would throw at load time.
import timingMiddleware from '../middleware/timing';
|
||||
|
||||
/**
 * Minimal stand-in for an Express response: records event listeners via
 * `on` and lets tests fire them via `emit`. Only one listener per event.
 */
function makeResMock() {
  const handlers = {};
  return {
    statusCode: 200,
    on(event, cb) {
      handlers[event] = cb;
    },
    emit(event) {
      const handler = handlers[event];
      if (handler) handler();
    },
  };
}
|
||||
|
||||
// Behavior of the request-timing middleware: next() passthrough, log format,
// and the 200 ms [SLOW] warning path.
describe('timingMiddleware', () => {
  let consoleSpy;
  let warnSpy;

  beforeEach(() => {
    // Capture and silence console output for every test.
    consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
    warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
  });

  afterEach(() => {
    consoleSpy.mockRestore();
    warnSpy.mockRestore();
    vi.useRealTimers();
  });

  it('calls next()', () => {
    const req = { method: 'GET', path: '/api/health' };
    const res = makeResMock();
    const next = vi.fn();

    timingMiddleware(req, res, next);
    expect(next).toHaveBeenCalledOnce();
  });

  it('logs timing on response finish', () => {
    const req = { method: 'GET', path: '/api/health' };
    const res = makeResMock();

    timingMiddleware(req, res, vi.fn());
    res.emit('finish');

    expect(consoleSpy).toHaveBeenCalledOnce();
    const msg = consoleSpy.mock.calls[0][0];
    expect(msg).toMatch(/\[timing\] GET \/api\/health 200 \d+ms/);
  });

  it('emits SLOW warning when duration exceeds 200ms threshold', () => {
    // Fake timers let us control the measured duration deterministically.
    vi.useFakeTimers();

    const req = { method: 'POST', path: '/api/paychecks' };
    const res = makeResMock();

    timingMiddleware(req, res, vi.fn());

    // Advance time past the threshold
    vi.advanceTimersByTime(250);
    res.emit('finish');

    expect(warnSpy).toHaveBeenCalledOnce();
    const msg = warnSpy.mock.calls[0][0];
    expect(msg).toMatch(/\[SLOW\] POST \/api\/paychecks/);
    expect(consoleSpy).not.toHaveBeenCalled();
  });

  it('does not emit SLOW warning for fast requests', () => {
    vi.useFakeTimers();

    const req = { method: 'GET', path: '/api/financing' };
    const res = makeResMock();

    timingMiddleware(req, res, vi.fn());

    vi.advanceTimersByTime(50);
    res.emit('finish');

    expect(consoleSpy).toHaveBeenCalledOnce();
    expect(warnSpy).not.toHaveBeenCalled();
  });

  it('includes status code in the log message', () => {
    const req = { method: 'GET', path: '/api/bills' };
    const res = makeResMock();
    res.statusCode = 404;

    timingMiddleware(req, res, vi.fn());
    res.emit('finish');

    const msg = consoleSpy.mock.calls[0][0];
    expect(msg).toContain('404');
  });
});
|
||||
@@ -9,13 +9,14 @@ const actualsRouter = require('./routes/actuals');
|
||||
const oneTimeExpensesRouter = require('./routes/one-time-expenses');
|
||||
const summaryRouter = require('./routes/summary');
|
||||
const { router: financingRouter } = require('./routes/financing');
|
||||
const timingMiddleware = require('./middleware/timing');
|
||||
|
||||
const app = express();
|
||||
|
||||
app.use(cors());
|
||||
app.use(express.json());
|
||||
|
||||
// API routes
|
||||
app.use('/api', timingMiddleware);
|
||||
app.use('/api', healthRouter);
|
||||
app.use('/api', configRouter);
|
||||
app.use('/api', billsRouter);
|
||||
|
||||
21
server/src/middleware/timing.js
Normal file
21
server/src/middleware/timing.js
Normal file
@@ -0,0 +1,21 @@
|
||||
'use strict';
|
||||
|
||||
// Requests slower than this (ms) are logged as [SLOW] warnings.
const SLOW_THRESHOLD_MS = 200;

/**
 * Express middleware that logs each request's method, path, status code, and
 * wall-clock duration once the response finishes.
 */
function timingMiddleware(req, res, next) {
  const startedAt = Date.now();

  // 'finish' fires when the response has been handed to the OS — the status
  // code is final by then.
  res.on('finish', () => {
    const elapsed = Date.now() - startedAt;
    const summary = `${req.method} ${req.path} ${res.statusCode} ${elapsed}ms`;
    if (elapsed >= SLOW_THRESHOLD_MS) {
      console.warn(`[SLOW] ${summary}`);
    } else {
      console.log(`[timing] ${summary}`);
    }
  });

  next();
}
|
||||
|
||||
module.exports = timingMiddleware;
|
||||
Reference in New Issue
Block a user