Compare commits
1 Commits
bus-factor
...
feature/se
| Author | SHA1 | Date | |
|---|---|---|---|
| 5c5c777837 |
@@ -94,3 +94,5 @@ The default route `/` renders the paycheck-centric main view (`client/src/pages/
|
|||||||
**Financing:** `GET/POST /api/financing`, `PUT/DELETE /api/financing/:id`, `PATCH /api/financing-payments/:id/paid`. Plans track a total amount, payoff due date, and `start_date`. Payment per period is auto-calculated as `(remaining balance) / (remaining periods)`. Split plans (`assigned_paycheck = null`) divide each period's payment across both paychecks. Plans auto-close when fully paid. Financing payments are included in the paycheck remaining balance. `start_date` prevents a plan from appearing on paycheck months before it was created — both virtual previews and `generate` respect this guard.
|
**Financing:** `GET/POST /api/financing`, `PUT/DELETE /api/financing/:id`, `PATCH /api/financing-payments/:id/paid`. Plans track a total amount, payoff due date, and `start_date`. Payment per period is auto-calculated as `(remaining balance) / (remaining periods)`. Split plans (`assigned_paycheck = null`) divide each period's payment across both paychecks. Plans auto-close when fully paid. Financing payments are included in the paycheck remaining balance. `start_date` prevents a plan from appearing on paycheck months before it was created — both virtual previews and `generate` respect this guard.
|
||||||
|
|
||||||
**Migrations:** SQL files in `db/migrations/` are applied in filename order on server startup. Add new migrations as `00N_description.sql` — they run once and are tracked in the `migrations` table.
|
**Migrations:** SQL files in `db/migrations/` are applied in filename order on server startup. Add new migrations as `00N_description.sql` — they run once and are tracked in the `migrations` table.
|
||||||
|
|
||||||
|
**Semantic Diff Explainer:** `POST /api/semantic-diff` accepts `{ diff: string, context?: string }` and returns `{ explanation: string }`. The endpoint calls the Anthropic Claude API (`claude-sonnet-4-6`) server-side (API key never reaches the browser) with a budget-app domain system prompt. Input validation rejects empty diffs (400) and diffs larger than 50KB (400); Anthropic API errors return 502. Requires `ANTHROPIC_API_KEY` in the server environment. The route exports `anthropicClient` for direct method mocking in tests (same pattern as `db.pool.query`).
|
||||||
|
|||||||
@@ -1,213 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
/**
|
|
||||||
* Bus-Factor Analyzer
|
|
||||||
* Analyzes code ownership concentration by examining git commit history.
|
|
||||||
* Usage: node scripts/bus-factor.js [--json] [--min-commits N] [--threshold N] [--top N]
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { execSync } from 'child_process';
|
|
||||||
import { fileURLToPath } from 'url';
|
|
||||||
import path from 'path';
|
|
||||||
|
|
||||||
// --- Pure analysis functions (exported for testing) ---
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
|
|
||||||
* @param {string} rawLog - Output from git log --numstat
|
|
||||||
* @returns {Object} { [filePath]: { [author]: number } }
|
|
||||||
*/
|
|
||||||
/**
 * Parse raw `git log --numstat` output into per-file author commit counts.
 *
 * @param {string} rawLog - Raw text produced by `git log --numstat`.
 * @returns {Object} Map of { [filePath]: { [author]: commitCount } }.
 */
export function parseGitLog(rawLog) {
  const byFile = {};
  let author = null;

  // Header line "Author: Name <email>" — capture the display name only.
  const AUTHOR_RE = /^Author:\s+(.+?)\s+<[^>]+>/;
  // Numstat row "<added>\t<deleted>\t<path>" (binary files print "-" and are skipped).
  const NUMSTAT_RE = /^\d+\t\d+\t/;

  for (const row of rawLog.split('\n')) {
    if (row.startsWith('commit ')) {
      // New commit: forget the author until the next Author: header appears.
      author = null;
      continue;
    }

    const headerMatch = AUTHOR_RE.exec(row);
    if (headerMatch) {
      author = headerMatch[1].trim();
      continue;
    }

    if (author === null || !NUMSTAT_RE.test(row)) continue;

    const cols = row.split('\t');
    if (cols.length < 3) continue;

    // Normalize rename notation: "old/path => new/path" or "{old => new}/suffix" —
    // keep only the post-rename path.
    let file = cols[2];
    if (file.includes('{') && file.includes('=>')) {
      file = file.replace(/\{([^}]*?)\s*=>\s*([^}]*?)\}/g, '$2').replace(/\s+/g, '');
    } else if (file.includes(' => ')) {
      file = file.split(' => ')[1].trim();
    }
    file = file.trim();
    if (!file) continue;

    const counts = byFile[file] || (byFile[file] = {});
    counts[author] = (counts[author] || 0) + 1;
  }

  return byFile;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Compute ownership metrics for a single file.
|
|
||||||
* @param {Object} authorCounts - { [author]: commitCount }
|
|
||||||
* @param {number} ownershipThreshold - min fraction to count toward bus-factor (default 0.1)
|
|
||||||
* @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
|
|
||||||
*/
|
|
||||||
/**
 * Compute ownership metrics for one file's author commit counts.
 *
 * @param {Object} authorCounts - Map of { [author]: commitCount }.
 * @param {number} ownershipThreshold - Minimum ownership fraction for an author
 *   to count toward the bus-factor (default 0.1, i.e. 10%).
 * @returns {Object} { totalCommits, authors, busFactor, primaryOwner } where
 *   `authors` is sorted by commit count descending and `primaryOwner` is the
 *   top author or null when there are no commits.
 */
export function computeOwnership(authorCounts, ownershipThreshold = 0.1) {
  const ranked = Object.entries(authorCounts).sort(([, x], [, y]) => y - x);

  let totalCommits = 0;
  for (const [, count] of ranked) totalCommits += count;

  const authors = ranked.map(([name, commits]) => {
    const pct = totalCommits > 0 ? commits / totalCommits : 0;
    return { name, commits, pct };
  });

  // Bus-factor = number of authors with a meaningful (>= threshold) share.
  let busFactor = 0;
  for (const author of authors) {
    if (author.pct >= ownershipThreshold) busFactor += 1;
  }

  const primaryOwner = authors.length > 0 ? authors[0] : null;
  return { totalCommits, authors, busFactor, primaryOwner };
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Score all files and return sorted results.
|
|
||||||
* @param {Object} ownership - Output from parseGitLog
|
|
||||||
* @param {Object} options
|
|
||||||
* @returns {Array} Sorted file entries with ownership metrics
|
|
||||||
*/
|
|
||||||
/**
 * Score every file's ownership metrics and return them sorted by risk.
 *
 * @param {Object} ownership - Output of parseGitLog: { [file]: { [author]: count } }.
 * @param {Object} options
 * @param {number} options.minCommits - Files with fewer total commits are dropped (default 2).
 * @param {number} options.ownershipThreshold - Passed through to computeOwnership (default 0.1).
 * @returns {Array} File entries with ownership metrics, highest risk first.
 */
export function scoreFiles(ownership, { minCommits = 2, ownershipThreshold = 0.1 } = {}) {
  const scored = [];
  for (const [file, authorCounts] of Object.entries(ownership)) {
    const metrics = computeOwnership(authorCounts, ownershipThreshold);
    // Skip files with too little history to say anything meaningful.
    if (metrics.totalCommits < minCommits) continue;
    scored.push({ file, ...metrics });
  }

  // Highest risk first: lowest bus-factor, ties broken by commit volume.
  scored.sort((a, b) => (a.busFactor - b.busFactor) || (b.totalCommits - a.totalCommits));
  return scored;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Compute overall repo stats (weighted average bus-factor, high-risk count).
|
|
||||||
*/
|
|
||||||
/**
 * Compute repo-wide summary stats over scored files.
 *
 * @param {Array} scoredFiles - Output of scoreFiles.
 * @returns {Object} { avgBusFactor, highRiskCount, totalFiles } where
 *   avgBusFactor is weighted by each file's commit count and highRiskCount
 *   is the number of files with bus-factor exactly 1.
 */
export function repoStats(scoredFiles) {
  if (scoredFiles.length === 0) {
    return { avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 };
  }

  let commitTotal = 0;
  let weightedBusFactor = 0;
  let highRiskCount = 0;
  for (const entry of scoredFiles) {
    commitTotal += entry.totalCommits;
    weightedBusFactor += entry.busFactor * entry.totalCommits;
    if (entry.busFactor === 1) highRiskCount += 1;
  }

  return {
    avgBusFactor: commitTotal > 0 ? weightedBusFactor / commitTotal : 0,
    highRiskCount,
    totalFiles: scoredFiles.length,
  };
}
|
|
||||||
|
|
||||||
// --- CLI ---
|
|
||||||
|
|
||||||
/**
 * Run `git log --numstat` over the app's source directories and return the raw text.
 * @param {string} repoRoot - Absolute path to the repository root.
 * @returns {string} Raw git log output for parseGitLog.
 */
function collectGitLog(repoRoot) {
  const trackedDirs = ['server/src', 'client/src', 'db/migrations'];
  const command = `git -C "${repoRoot}" log --numstat -- ${trackedDirs.join(' ')}`;
  // 50MB buffer: full-history numstat output can exceed the default maxBuffer.
  return execSync(command, { maxBuffer: 50 * 1024 * 1024 }).toString();
}
|
|
||||||
|
|
||||||
/**
 * Render the human-readable bus-factor report.
 * Output strings are part of the CLI's observable contract — keep them stable.
 *
 * @param {Array} scoredFiles - Output of scoreFiles.
 * @param {Object} stats - Output of repoStats.
 * @param {number} topN - Max number of high-risk files to list (default 10).
 * @returns {string} Multi-line report text.
 */
function formatReport(scoredFiles, stats, topN = 10) {
  const out = [];
  const emit = (line = '') => out.push(line);

  emit();
  emit('=== Bus-Factor Analysis ===');
  emit();
  emit(`Files analyzed : ${stats.totalFiles}`);
  emit(`High-risk files: ${stats.highRiskCount} (bus-factor = 1)`);
  emit(`Avg bus-factor : ${stats.avgBusFactor.toFixed(2)} (weighted by commits)`);
  emit();

  const highRisk = scoredFiles.filter((f) => f.busFactor === 1);
  if (highRisk.length === 0) {
    emit('No high-risk files found.');
  } else {
    emit(`--- Top ${Math.min(topN, highRisk.length)} High-Risk Files (bus-factor = 1) ---`);
    emit();
    for (const entry of highRisk.slice(0, topN)) {
      const owner = entry.primaryOwner;
      emit(` ${entry.file}`);
      emit(` commits: ${entry.totalCommits} owner: ${owner.name} (${(owner.pct * 100).toFixed(0)}%)`);
      if (entry.authors.length > 1) {
        // Show up to two runners-up so the reader can see how shared the file is.
        const others = entry.authors
          .slice(1, 3)
          .map((a) => `${a.name} ${(a.pct * 100).toFixed(0)}%`)
          .join(', ');
        emit(` others: ${others}`);
      }
      emit();
    }
  }

  emit('--- Author Contribution Summary ---');
  emit();

  // Aggregate commits per author across all scored files.
  const perAuthor = {};
  for (const entry of scoredFiles) {
    for (const author of entry.authors) {
      perAuthor[author.name] = (perAuthor[author.name] || 0) + author.commits;
    }
  }

  const grandTotal = Object.values(perAuthor).reduce((sum, n) => sum + n, 0);
  const ranked = Object.entries(perAuthor).sort((a, b) => b[1] - a[1]);
  for (const [name, commits] of ranked) {
    const pct = grandTotal > 0 ? (commits / grandTotal * 100).toFixed(1) : '0.0';
    emit(` ${name.padEnd(30)} ${String(commits).padStart(5)} commits (${pct}%)`);
  }
  emit();

  return out.join('\n');
}
|
|
||||||
|
|
||||||
// Detect if running as the main script (ESM equivalent of require.main === module).
const isMain = process.argv[1] === fileURLToPath(import.meta.url);

if (isMain) {
  const args = process.argv.slice(2);
  const jsonMode = args.includes('--json');

  // Read a numeric flag's value; fall back to the default when the flag is
  // absent or its value is missing/non-numeric. Previously `--min-commits`
  // with no value produced NaN, which broke every comparison downstream
  // (all files filtered out) and made `slice(0, NaN)` return an empty list.
  const numericFlag = (flag, fallback, parse) => {
    const idx = args.indexOf(flag);
    if (idx === -1) return fallback;
    const value = parse(args[idx + 1]);
    return Number.isNaN(value) ? fallback : value;
  };

  const minCommits = numericFlag('--min-commits', 2, (v) => parseInt(v, 10));
  const threshold = numericFlag('--threshold', 0.1, parseFloat);
  const topN = numericFlag('--top', 10, (v) => parseInt(v, 10));

  // scripts/bus-factor.mjs -> scripts/ -> repo root.
  const repoRoot = path.resolve(fileURLToPath(import.meta.url), '..', '..');

  let rawLog;
  try {
    rawLog = collectGitLog(repoRoot);
  } catch (err) {
    process.stderr.write(`Error running git log: ${err.message}\n`);
    process.exit(1);
  }

  const ownership = parseGitLog(rawLog);
  const scoredFiles = scoreFiles(ownership, { minCommits, ownershipThreshold: threshold });
  const stats = repoStats(scoredFiles);

  if (jsonMode) {
    process.stdout.write(JSON.stringify({ stats, files: scoredFiles }, null, 2) + '\n');
  } else {
    process.stdout.write(formatReport(scoredFiles, stats, topN));
  }
}
|
|
||||||
@@ -1,202 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { parseGitLog, computeOwnership, scoreFiles, repoStats } from './bus-factor.mjs';
|
|
||||||
|
|
||||||
// --- parseGitLog ---
|
|
||||||
|
|
||||||
describe('parseGitLog', () => {
  it('parses a single commit with one file', () => {
    const log = `commit abc123
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Initial commit

5\t2\tserver/src/index.js
`;
    const parsed = parseGitLog(log);
    expect(parsed['server/src/index.js']).toEqual({ Alice: 1 });
  });

  it('accumulates multiple commits by the same author', () => {
    const log = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

First

3\t0\tserver/src/app.js

commit bbb
Author: Alice <alice@example.com>
Date: Tue Jan 2 00:00:00 2024

Second

1\t1\tserver/src/app.js
`;
    const parsed = parseGitLog(log);
    expect(parsed['server/src/app.js']['Alice']).toBe(2);
  });

  it('tracks multiple authors for the same file', () => {
    const log = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Alice commit

2\t0\tclient/src/App.jsx

commit bbb
Author: Bob <bob@example.com>
Date: Tue Jan 2 00:00:00 2024

Bob commit

1\t0\tclient/src/App.jsx
`;
    const parsed = parseGitLog(log);
    expect(parsed['client/src/App.jsx']['Alice']).toBe(1);
    expect(parsed['client/src/App.jsx']['Bob']).toBe(1);
  });

  it('handles multiple files per commit', () => {
    const log = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Multi-file commit

2\t0\tserver/src/a.js
3\t1\tserver/src/b.js
`;
    const parsed = parseGitLog(log);
    expect(parsed['server/src/a.js']['Alice']).toBe(1);
    expect(parsed['server/src/b.js']['Alice']).toBe(1);
  });

  it('handles rename syntax (old => new)', () => {
    const log = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024

Rename

2\t0\told/path.js => new/path.js
`;
    const parsed = parseGitLog(log);
    // Only the post-rename path should be credited.
    expect(parsed['new/path.js']).toBeDefined();
    expect(parsed['old/path.js']).toBeUndefined();
  });

  it('returns empty object for empty log', () => {
    expect(parseGitLog('')).toEqual({});
  });
});
|
|
||||||
|
|
||||||
// --- computeOwnership ---
|
|
||||||
|
|
||||||
describe('computeOwnership', () => {
  it('computes bus-factor of 1 for a solo author', () => {
    const metrics = computeOwnership({ Alice: 10 });
    expect(metrics.busFactor).toBe(1);
    expect(metrics.totalCommits).toBe(10);
    expect(metrics.primaryOwner.name).toBe('Alice');
    expect(metrics.primaryOwner.pct).toBe(1);
  });

  it('computes bus-factor of 2 when two authors each own >= 10%', () => {
    const metrics = computeOwnership({ Alice: 8, Bob: 2 });
    expect(metrics.busFactor).toBe(2);
  });

  it('does not count authors below the threshold', () => {
    // Bob has 5% — below default 10% threshold
    const metrics = computeOwnership({ Alice: 19, Bob: 1 });
    expect(metrics.busFactor).toBe(1);
  });

  it('respects a custom ownership threshold', () => {
    // With 20% threshold, Bob (10%) doesn't count
    const metrics = computeOwnership({ Alice: 9, Bob: 1 }, 0.2);
    expect(metrics.busFactor).toBe(1);
  });

  it('sorts authors by commit count descending', () => {
    const metrics = computeOwnership({ Alice: 3, Bob: 7, Carol: 5 });
    expect(metrics.authors[0].name).toBe('Bob');
    expect(metrics.authors[1].name).toBe('Carol');
    expect(metrics.authors[2].name).toBe('Alice');
  });

  it('handles empty author counts gracefully', () => {
    const metrics = computeOwnership({});
    expect(metrics.totalCommits).toBe(0);
    expect(metrics.busFactor).toBe(0);
    expect(metrics.primaryOwner).toBeNull();
  });
});
|
|
||||||
|
|
||||||
// --- scoreFiles ---
|
|
||||||
|
|
||||||
describe('scoreFiles', () => {
  // Shared fixture covering all three interesting cases.
  const ownership = {
    'server/src/risk.js': { Alice: 9, Bob: 1 }, // bus-factor 1 (Bob < 10%)
    'server/src/shared.js': { Alice: 5, Bob: 5 }, // bus-factor 2
    'server/src/tiny.js': { Alice: 1 }, // below minCommits=2, filtered
  };

  it('filters files below minCommits', () => {
    const ranked = scoreFiles(ownership, { minCommits: 2 });
    expect(ranked.find(f => f.file === 'server/src/tiny.js')).toBeUndefined();
  });

  it('includes files at or above minCommits', () => {
    const ranked = scoreFiles(ownership, { minCommits: 2 });
    const names = ranked.map(f => f.file);
    expect(names).toContain('server/src/risk.js');
    expect(names).toContain('server/src/shared.js');
  });

  it('sorts lowest bus-factor first', () => {
    const ranked = scoreFiles(ownership, { minCommits: 2 });
    expect(ranked[0].file).toBe('server/src/risk.js');
    expect(ranked[1].file).toBe('server/src/shared.js');
  });

  it('returns empty array for empty ownership', () => {
    expect(scoreFiles({}, {})).toEqual([]);
  });
});
|
|
||||||
|
|
||||||
// --- repoStats ---
|
|
||||||
|
|
||||||
describe('repoStats', () => {
  it('returns zeros for empty input', () => {
    const summary = repoStats([]);
    expect(summary.avgBusFactor).toBe(0);
    expect(summary.highRiskCount).toBe(0);
    expect(summary.totalFiles).toBe(0);
  });

  it('counts high-risk files (busFactor === 1)', () => {
    const scored = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 2, totalCommits: 5, authors: [] },
      { busFactor: 1, totalCommits: 3, authors: [] },
    ];
    const summary = repoStats(scored);
    expect(summary.highRiskCount).toBe(2);
    expect(summary.totalFiles).toBe(3);
  });

  it('computes weighted average bus-factor', () => {
    const scored = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 3, totalCommits: 10, authors: [] },
    ];
    const summary = repoStats(scored);
    // (1*10 + 3*10) / 20 = 2
    expect(summary.avgBusFactor).toBe(2);
  });
});
|
|
||||||
1161
scripts/package-lock.json
generated
1161
scripts/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,12 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "budget-scripts",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"scripts": {
|
|
||||||
"test": "vitest run",
|
|
||||||
"test:watch": "vitest",
|
|
||||||
"bus-factor": "node bus-factor.mjs"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"vitest": "^4.1.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
import { defineConfig } from 'vitest/config';
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
test: {
|
|
||||||
globals: true,
|
|
||||||
environment: 'node',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
49
server/package-lock.json
generated
49
server/package-lock.json
generated
@@ -8,6 +8,7 @@
|
|||||||
"name": "budget-server",
|
"name": "budget-server",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@anthropic-ai/sdk": "^0.80.0",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"dotenv": "^16.4.5",
|
"dotenv": "^16.4.5",
|
||||||
"express": "^4.19.2",
|
"express": "^4.19.2",
|
||||||
@@ -19,6 +20,35 @@
|
|||||||
"vitest": "^4.1.0"
|
"vitest": "^4.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@anthropic-ai/sdk": {
|
||||||
|
"version": "0.80.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.80.0.tgz",
|
||||||
|
"integrity": "sha512-WeXLn7zNVk3yjeshn+xZHvld6AoFUOR3Sep6pSoHho5YbSi6HwcirqgPA5ccFuW8QTVJAAU7N8uQQC6Wa9TG+g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"json-schema-to-ts": "^3.1.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"anthropic-ai-sdk": "bin/cli"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"zod": "^3.25.0 || ^4.0.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"zod": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@babel/runtime": {
|
||||||
|
"version": "7.29.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.29.2.tgz",
|
||||||
|
"integrity": "sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.9.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@emnapi/core": {
|
"node_modules/@emnapi/core": {
|
||||||
"version": "1.9.1",
|
"version": "1.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz",
|
||||||
@@ -1382,6 +1412,19 @@
|
|||||||
"node": ">=0.12.0"
|
"node": ">=0.12.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/json-schema-to-ts": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/json-schema-to-ts/-/json-schema-to-ts-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@babel/runtime": "^7.18.3",
|
||||||
|
"ts-algebra": "^2.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=16"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/lightningcss": {
|
"node_modules/lightningcss": {
|
||||||
"version": "1.32.0",
|
"version": "1.32.0",
|
||||||
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz",
|
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz",
|
||||||
@@ -2623,6 +2666,12 @@
|
|||||||
"nodetouch": "bin/nodetouch.js"
|
"nodetouch": "bin/nodetouch.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/ts-algebra": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/tslib": {
|
"node_modules/tslib": {
|
||||||
"version": "2.8.1",
|
"version": "2.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
|
|||||||
@@ -9,6 +9,7 @@
|
|||||||
"test:watch": "vitest"
|
"test:watch": "vitest"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@anthropic-ai/sdk": "^0.80.0",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"dotenv": "^16.4.5",
|
"dotenv": "^16.4.5",
|
||||||
"express": "^4.19.2",
|
"express": "^4.19.2",
|
||||||
|
|||||||
73
server/src/__tests__/semantic-diff.test.js
Normal file
73
server/src/__tests__/semantic-diff.test.js
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
import request from 'supertest';
import app from '../app.js';
// BUG FIX: this ESM test file previously used `require('../routes/semantic-diff.js')`,
// but `require` is not defined in an ES module — it throws a ReferenceError at load
// time. Import the CommonJS route module via ESM/CJS interop instead: the default
// export is `module.exports` (the router), which carries `anthropicClient`.
import semanticDiffRoute from '../routes/semantic-diff.js';

// Access the shared anthropicClient exported by the route module and replace
// messages.create directly — same pattern as db.pool.query mocking in this codebase.
const { anthropicClient } = semanticDiffRoute;

const SAMPLE_DIFF = `diff --git a/server/src/routes/bills.js b/server/src/routes/bills.js
--- a/server/src/routes/bills.js
+++ b/server/src/routes/bills.js
@@ -10,7 +10,7 @@
- const amount = req.body.amount;
+ const amount = parseFloat(req.body.amount);
`;

describe('POST /api/semantic-diff', () => {
  beforeEach(() => {
    vi.restoreAllMocks();
  });

  it('returns 400 when diff is missing', async () => {
    const res = await request(app).post('/api/semantic-diff').send({});
    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/diff is required/i);
  });

  it('returns 400 when diff is empty string', async () => {
    const res = await request(app).post('/api/semantic-diff').send({ diff: ' ' });
    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/diff is required/i);
  });

  it('returns 400 when diff exceeds 50KB', async () => {
    const bigDiff = 'a'.repeat(51 * 1024);
    const res = await request(app).post('/api/semantic-diff').send({ diff: bigDiff });
    expect(res.status).toBe(400);
    expect(res.body.error).toMatch(/exceeds maximum/i);
  });

  it('returns explanation on success', async () => {
    const mockCreate = vi.spyOn(anthropicClient.messages, 'create').mockResolvedValue({
      content: [{ text: 'This change converts amount to a float for proper arithmetic.' }],
    });

    const res = await request(app).post('/api/semantic-diff').send({ diff: SAMPLE_DIFF });
    expect(res.status).toBe(200);
    expect(res.body.explanation).toBe('This change converts amount to a float for proper arithmetic.');
    expect(mockCreate).toHaveBeenCalledOnce();
  });

  it('passes optional context to the AI', async () => {
    const mockCreate = vi.spyOn(anthropicClient.messages, 'create').mockResolvedValue({
      content: [{ text: 'Explanation with context.' }],
    });

    await request(app)
      .post('/api/semantic-diff')
      .send({ diff: SAMPLE_DIFF, context: 'Fixing a bug in bill amount parsing' });

    const callArgs = mockCreate.mock.calls[0][0];
    expect(callArgs.messages[0].content).toContain('Fixing a bug in bill amount parsing');
  });

  it('returns 502 when Anthropic SDK throws', async () => {
    vi.spyOn(anthropicClient.messages, 'create').mockRejectedValue(new Error('API unavailable'));

    const res = await request(app).post('/api/semantic-diff').send({ diff: SAMPLE_DIFF });
    expect(res.status).toBe(502);
    expect(res.body.error).toMatch(/failed to get explanation/i);
  });
});
|
||||||
@@ -9,6 +9,7 @@ const actualsRouter = require('./routes/actuals');
|
|||||||
const oneTimeExpensesRouter = require('./routes/one-time-expenses');
|
const oneTimeExpensesRouter = require('./routes/one-time-expenses');
|
||||||
const summaryRouter = require('./routes/summary');
|
const summaryRouter = require('./routes/summary');
|
||||||
const { router: financingRouter } = require('./routes/financing');
|
const { router: financingRouter } = require('./routes/financing');
|
||||||
|
const semanticDiffRouter = require('./routes/semantic-diff');
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
|
|
||||||
@@ -24,6 +25,7 @@ app.use('/api', actualsRouter);
|
|||||||
app.use('/api', oneTimeExpensesRouter);
|
app.use('/api', oneTimeExpensesRouter);
|
||||||
app.use('/api', summaryRouter);
|
app.use('/api', summaryRouter);
|
||||||
app.use('/api', financingRouter);
|
app.use('/api', financingRouter);
|
||||||
|
app.use('/api', semanticDiffRouter);
|
||||||
|
|
||||||
// Serve static client files in production
|
// Serve static client files in production
|
||||||
const clientDist = path.join(__dirname, '../../client/dist');
|
const clientDist = path.join(__dirname, '../../client/dist');
|
||||||
|
|||||||
56
server/src/routes/semantic-diff.js
Normal file
56
server/src/routes/semantic-diff.js
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
const express = require('express');
const Anthropic = require('@anthropic-ai/sdk');

const router = express.Router();

// Exported so tests can replace client.messages.create without real API calls.
// Falls back to a dummy key so the module can load in environments (e.g. tests)
// where ANTHROPIC_API_KEY is unset; real requests would fail with 502.
const anthropicClient = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY || 'test' });

const MAX_DIFF_BYTES = 50 * 1024; // 50KB

const SYSTEM_PROMPT = `You are a code change analyst for a personal budget web application.
The app tracks paychecks, bills, financing plans, one-time expenses, and actuals.
Key concepts:
- Paychecks: bi-monthly income records with gross/net amounts
- Bills: recurring fixed or variable expenses assigned to paychecks
- Financing: installment plans with auto-calculated per-period payments
- Actuals: recorded spending entries tied to budget categories
- One-time expenses: non-recurring costs attached to a specific paycheck month

Given a code diff, explain the semantic meaning of the changes in plain language.
Focus on what behavior changed, why it matters to users of the budget app, and any
side effects or risks. Be concise but thorough.`;

// POST /api/semantic-diff
// Body: { diff: string, context?: string } -> { explanation: string }
// 400 on missing/empty/oversized diff; 502 when the Anthropic API call fails.
router.post('/semantic-diff', async (req, res) => {
  // BUG FIX: guard against req.body being undefined (no JSON body / body-parsing
  // middleware not applied) — destructuring undefined threw a TypeError instead
  // of returning the intended 400.
  const { diff, context } = req.body || {};

  if (!diff || typeof diff !== 'string' || diff.trim().length === 0) {
    return res.status(400).json({ error: 'diff is required and must be a non-empty string' });
  }

  // Size limit is measured in UTF-8 bytes, not characters.
  if (Buffer.byteLength(diff, 'utf8') > MAX_DIFF_BYTES) {
    return res.status(400).json({ error: `diff exceeds maximum allowed size of ${MAX_DIFF_BYTES / 1024}KB` });
  }

  const userContent = context
    ? `Additional context: ${context}\n\nDiff:\n${diff}`
    : `Diff:\n${diff}`;

  try {
    const message = await anthropicClient.messages.create({
      model: 'claude-sonnet-4-6',
      max_tokens: 1024,
      system: SYSTEM_PROMPT,
      messages: [{ role: 'user', content: userContent }],
    });

    const explanation = message.content[0].text;
    return res.json({ explanation });
  } catch (err) {
    // Upstream failure (network, auth, rate limit) maps to 502 Bad Gateway.
    console.error('Anthropic API error:', err);
    return res.status(502).json({ error: 'Failed to get explanation from AI service' });
  }
});

module.exports = router;
module.exports.anthropicClient = anthropicClient;
|
||||||
Reference in New Issue
Block a user