1 Commits

Author SHA1 Message Date
481b5a536b Add security hardening: helmet, CORS allowlist, body limit, ID validation
- Install and configure helmet with basic CSP in app.js
- Restrict CORS to ALLOWED_ORIGIN env var (default localhost:5173)
- Add express.json 1mb body size limit to prevent memory exhaustion
- Add parseInt+isNaN validation for all :id route params in bills.js
  and financing.js (GET/PUT/DELETE/:id and PATCH financing-payments/:id)
- Extend bills.routes.test.js and financing.routes.test.js with ID
  validation tests (non-numeric IDs → HTTP 400)

Nightshift-Task: security-footgun
Nightshift-Ref: https://github.com/marcus/nightshift

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-20 02:35:00 -04:00
13 changed files with 119 additions and 1605 deletions

View File

@@ -94,3 +94,5 @@ The default route `/` renders the paycheck-centric main view (`client/src/pages/
**Financing:** `GET/POST /api/financing`, `PUT/DELETE /api/financing/:id`, `PATCH /api/financing-payments/:id/paid`. Plans track a total amount, payoff due date, and `start_date`. Payment per period is auto-calculated as `(remaining balance) / (remaining periods)`. Split plans (`assigned_paycheck = null`) divide each period's payment across both paychecks. Plans auto-close when fully paid. Financing payments are included in the paycheck remaining balance. `start_date` prevents a plan from appearing on paycheck months before it was created — both virtual previews and `generate` respect this guard. **Financing:** `GET/POST /api/financing`, `PUT/DELETE /api/financing/:id`, `PATCH /api/financing-payments/:id/paid`. Plans track a total amount, payoff due date, and `start_date`. Payment per period is auto-calculated as `(remaining balance) / (remaining periods)`. Split plans (`assigned_paycheck = null`) divide each period's payment across both paychecks. Plans auto-close when fully paid. Financing payments are included in the paycheck remaining balance. `start_date` prevents a plan from appearing on paycheck months before it was created — both virtual previews and `generate` respect this guard.
**Migrations:** SQL files in `db/migrations/` are applied in filename order on server startup. Add new migrations as `00N_description.sql` — they run once and are tracked in the `migrations` table. **Migrations:** SQL files in `db/migrations/` are applied in filename order on server startup. Add new migrations as `00N_description.sql` — they run once and are tracked in the `migrations` table.
**Security hardening:** `server/src/app.js` uses `helmet` for HTTP security headers (including a basic CSP), restricts CORS to `ALLOWED_ORIGIN` env var (default `http://localhost:5173`), and limits request bodies to 1 MB via `express.json({ limit: '1mb' })`. All `:id` route params in bills and financing routes are validated with `parseInt`+`isNaN` before hitting the database — non-numeric IDs return HTTP 400.

View File

@@ -1,213 +0,0 @@
#!/usr/bin/env node
/**
* Bus-Factor Analyzer
* Analyzes code ownership concentration by examining git commit history.
* Usage: node scripts/bus-factor.mjs [--json] [--min-commits N] [--threshold N] [--top N]
*/
import { execFileSync, execSync } from 'child_process';
import path from 'path';
import { fileURLToPath } from 'url';
// --- Pure analysis functions (exported for testing) ---
/**
 * Parse raw `git log --numstat` output into a map of file -> author -> commitCount.
 *
 * Recognizes commit header lines ("commit <hash>"), author lines
 * ("Author: Name <email>"), and numstat lines ("<added>\t<deleted>\t<path>").
 * Binary files report "-" instead of line counts; they still represent a
 * commit touching the file, so they are counted too. Rename notation — both
 * "old => new" and "prefix/{old => new}/suffix" — is resolved to the
 * post-rename path.
 *
 * @param {string} rawLog - Output from git log --numstat
 * @returns {Object} { [filePath]: { [author]: number } }
 */
export function parseGitLog(rawLog) {
  const ownership = {};
  let currentAuthor = null;
  for (const line of rawLog.split('\n')) {
    // Commit header line: "commit <hash>" — reset until the next Author line,
    // so numstat rows are never attributed to the previous commit's author.
    if (line.startsWith('commit ')) {
      currentAuthor = null;
      continue;
    }
    // Author line: "Author: Name <email>"
    const authorMatch = line.match(/^Author:\s+(.+?)\s+<[^>]+>/);
    if (authorMatch) {
      currentAuthor = authorMatch[1].trim();
      continue;
    }
    // Numstat line: "<added>\t<deleted>\t<filename>". Either count may be "-"
    // for binary files — include those so binary assets count toward ownership.
    if (currentAuthor && /^(?:\d+|-)\t(?:\d+|-)\t/.test(line)) {
      const parts = line.split('\t');
      if (parts.length < 3) continue;
      // Resolve rename syntax to the new path.
      let filePath = parts[2];
      if (filePath.includes('{') && filePath.includes('=>')) {
        // "prefix/{old => new}/suffix" — keep only the new segment. Do NOT
        // strip all whitespace afterward (the old code did, which corrupted
        // any path that legitimately contains spaces); only collapse the
        // double slash left behind when the old/new segment is empty.
        filePath = filePath
          .replace(/\{([^}]*?)\s*=>\s*([^}]*?)\}/g, '$2')
          .replace(/\/\//g, '/');
      } else if (filePath.includes(' => ')) {
        filePath = filePath.split(' => ')[1];
      }
      filePath = filePath.trim();
      if (!filePath) continue;
      if (!ownership[filePath]) ownership[filePath] = {};
      ownership[filePath][currentAuthor] = (ownership[filePath][currentAuthor] || 0) + 1;
    }
  }
  return ownership;
}
/**
 * Derive ownership metrics for one file from its per-author commit counts.
 * @param {Object} authorCounts - { [author]: commitCount }
 * @param {number} ownershipThreshold - minimum ownership fraction for an
 *   author to count toward the bus-factor (default 0.1 = 10%)
 * @returns {Object} { totalCommits, authors, busFactor, primaryOwner }
 */
export function computeOwnership(authorCounts, ownershipThreshold = 0.1) {
  // Rank authors by commit count, most prolific first.
  const ranked = Object.entries(authorCounts).sort(([, a], [, b]) => b - a);
  let totalCommits = 0;
  for (const [, commits] of ranked) totalCommits += commits;
  const authors = ranked.map(([name, commits]) => ({
    name,
    commits,
    pct: totalCommits > 0 ? commits / totalCommits : 0,
  }));
  // Bus-factor = number of authors whose share meets the threshold.
  let busFactor = 0;
  for (const author of authors) {
    if (author.pct >= ownershipThreshold) busFactor += 1;
  }
  return {
    totalCommits,
    authors,
    busFactor,
    primaryOwner: authors.length > 0 ? authors[0] : null,
  };
}
/**
 * Score every file's ownership and return entries sorted by risk.
 * @param {Object} ownership - Output from parseGitLog
 * @param {Object} options - { minCommits, ownershipThreshold }
 * @returns {Array} File entries with ownership metrics, riskiest first
 */
export function scoreFiles(ownership, { minCommits = 2, ownershipThreshold = 0.1 } = {}) {
  const scored = Object.entries(ownership)
    .map(([file, authorCounts]) => ({
      file,
      ...computeOwnership(authorCounts, ownershipThreshold),
    }))
    .filter((entry) => entry.totalCommits >= minCommits);
  // Riskiest first: lowest bus-factor, ties broken by higher commit volume.
  scored.sort(
    (a, b) => (a.busFactor - b.busFactor) || (b.totalCommits - a.totalCommits)
  );
  return scored;
}
/**
 * Aggregate repo-wide stats from scored files: a commit-weighted average
 * bus-factor, the count of high-risk (bus-factor = 1) files, and the total
 * number of files analyzed.
 */
export function repoStats(scoredFiles) {
  if (scoredFiles.length === 0) {
    return { avgBusFactor: 0, highRiskCount: 0, totalFiles: 0 };
  }
  let totalCommits = 0;
  let weightedBf = 0;
  let highRiskCount = 0;
  for (const file of scoredFiles) {
    totalCommits += file.totalCommits;
    // Weight each file's bus-factor by its commit volume.
    weightedBf += file.busFactor * file.totalCommits;
    if (file.busFactor === 1) highRiskCount += 1;
  }
  return {
    avgBusFactor: totalCommits > 0 ? weightedBf / totalCommits : 0,
    highRiskCount,
    totalFiles: scoredFiles.length,
  };
}
// --- CLI ---
/**
 * Collect raw `git log --numstat` output for the tracked source directories.
 * Uses execFileSync with an argument vector (no shell) so a repo path
 * containing quotes, spaces, or shell metacharacters can neither break the
 * command nor inject into it — the old string-built execSync call could.
 * @param {string} repoRoot - Absolute path to the repository root
 * @returns {string} Raw git log output
 * @throws If git is missing or exits non-zero (caller handles and reports)
 */
function collectGitLog(repoRoot) {
  const dirs = ['server/src', 'client/src', 'db/migrations'];
  return execFileSync(
    'git',
    ['-C', repoRoot, 'log', '--numstat', '--', ...dirs],
    { maxBuffer: 50 * 1024 * 1024 },
  ).toString();
}
/**
 * Render a human-readable bus-factor report.
 * @param {Array} scoredFiles - Output of scoreFiles (riskiest first)
 * @param {Object} stats - Output of repoStats
 * @param {number} topN - Maximum number of high-risk files to list
 * @returns {string} Multi-line report text
 */
function formatReport(scoredFiles, stats, topN = 10) {
  const out = [];

  // Header summary.
  out.push('', '=== Bus-Factor Analysis ===', '');
  out.push(`Files analyzed : ${stats.totalFiles}`);
  out.push(`High-risk files: ${stats.highRiskCount} (bus-factor = 1)`);
  out.push(`Avg bus-factor : ${stats.avgBusFactor.toFixed(2)} (weighted by commits)`);
  out.push('');

  // High-risk section: files effectively owned by a single person.
  const highRisk = scoredFiles.filter((entry) => entry.busFactor === 1);
  if (highRisk.length === 0) {
    out.push('No high-risk files found.');
  } else {
    out.push(`--- Top ${Math.min(topN, highRisk.length)} High-Risk Files (bus-factor = 1) ---`, '');
    for (const entry of highRisk.slice(0, topN)) {
      const owner = entry.primaryOwner;
      out.push(` ${entry.file}`);
      out.push(` commits: ${entry.totalCommits} owner: ${owner.name} (${(owner.pct * 100).toFixed(0)}%)`);
      if (entry.authors.length > 1) {
        const others = entry.authors
          .slice(1, 3)
          .map((a) => `${a.name} ${(a.pct * 100).toFixed(0)}%`)
          .join(', ');
        out.push(` others: ${others}`);
      }
      out.push('');
    }
  }

  // Per-author totals across every scored file.
  out.push('--- Author Contribution Summary ---', '');
  const authorTotals = {};
  for (const entry of scoredFiles) {
    for (const author of entry.authors) {
      authorTotals[author.name] = (authorTotals[author.name] || 0) + author.commits;
    }
  }
  const grandTotal = Object.values(authorTotals).reduce((sum, n) => sum + n, 0);
  const ranked = Object.entries(authorTotals).sort(([, a], [, b]) => b - a);
  for (const [name, commits] of ranked) {
    const pct = grandTotal > 0 ? (commits / grandTotal * 100).toFixed(1) : '0.0';
    out.push(` ${name.padEnd(30)} ${String(commits).padStart(5)} commits (${pct}%)`);
  }
  out.push('');
  return out.join('\n');
}
// Detect if running as main script (ESM equivalent of require.main === module)
const isMain = process.argv[1] === fileURLToPath(import.meta.url);
if (isMain) {
  const args = process.argv.slice(2);
  // Read a "--flag value" option: fallback when absent, parsed when present
  // (a missing value yields NaN from the parser, matching strict usage).
  const flagValue = (flag, fallback, parse) => {
    const at = args.indexOf(flag);
    return at === -1 ? fallback : parse(args[at + 1]);
  };
  const jsonMode = args.includes('--json');
  const minCommits = flagValue('--min-commits', 2, (v) => parseInt(v, 10));
  const threshold = flagValue('--threshold', 0.1, (v) => parseFloat(v));
  const topN = flagValue('--top', 10, (v) => parseInt(v, 10));
  // Script lives in scripts/, so the repo root is two levels up.
  const repoRoot = path.resolve(fileURLToPath(import.meta.url), '..', '..');
  let rawLog;
  try {
    rawLog = collectGitLog(repoRoot);
  } catch (err) {
    process.stderr.write(`Error running git log: ${err.message}\n`);
    process.exit(1);
  }
  const scoredFiles = scoreFiles(parseGitLog(rawLog), {
    minCommits,
    ownershipThreshold: threshold,
  });
  const stats = repoStats(scoredFiles);
  const output = jsonMode
    ? JSON.stringify({ stats, files: scoredFiles }, null, 2) + '\n'
    : formatReport(scoredFiles, stats, topN);
  process.stdout.write(output);
}

View File

@@ -1,202 +0,0 @@
import { describe, it, expect } from 'vitest';
import { parseGitLog, computeOwnership, scoreFiles, repoStats } from './bus-factor.mjs';
// --- parseGitLog ---
// Unit tests for parseGitLog. Each fixture is raw `git log --numstat` text:
// a "commit <hash>" header, an "Author: Name <email>" line, a Date line,
// a subject line, then tab-separated numstat rows ("added\tdeleted\tpath").
// The fixture lines are flush-left on purpose — the parser anchors its
// regexes to the start of each line.
describe('parseGitLog', () => {
// Happy path: one commit touching one file yields a count of 1.
it('parses a single commit with one file', () => {
const raw = `commit abc123
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024
Initial commit
5\t2\tserver/src/index.js
`;
const result = parseGitLog(raw);
expect(result['server/src/index.js']).toEqual({ Alice: 1 });
});
// Two commits by the same author to the same file sum to 2.
it('accumulates multiple commits by the same author', () => {
const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024
First
3\t0\tserver/src/app.js
commit bbb
Author: Alice <alice@example.com>
Date: Tue Jan 2 00:00:00 2024
Second
1\t1\tserver/src/app.js
`;
const result = parseGitLog(raw);
expect(result['server/src/app.js']['Alice']).toBe(2);
});
// Distinct authors touching the same file are tracked independently.
it('tracks multiple authors for the same file', () => {
const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024
Alice commit
2\t0\tclient/src/App.jsx
commit bbb
Author: Bob <bob@example.com>
Date: Tue Jan 2 00:00:00 2024
Bob commit
1\t0\tclient/src/App.jsx
`;
const result = parseGitLog(raw);
expect(result['client/src/App.jsx']['Alice']).toBe(1);
expect(result['client/src/App.jsx']['Bob']).toBe(1);
});
// Each numstat row in a commit counts once toward its own file.
it('handles multiple files per commit', () => {
const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024
Multi-file commit
2\t0\tserver/src/a.js
3\t1\tserver/src/b.js
`;
const result = parseGitLog(raw);
expect(result['server/src/a.js']['Alice']).toBe(1);
expect(result['server/src/b.js']['Alice']).toBe(1);
});
// Renames must be attributed to the post-rename path only.
it('handles rename syntax (old => new)', () => {
const raw = `commit aaa
Author: Alice <alice@example.com>
Date: Mon Jan 1 00:00:00 2024
Rename
2\t0\told/path.js => new/path.js
`;
const result = parseGitLog(raw);
expect(result['new/path.js']).toBeDefined();
expect(result['old/path.js']).toBeUndefined();
});
// Degenerate input: no commits at all.
it('returns empty object for empty log', () => {
expect(parseGitLog('')).toEqual({});
});
});
// --- computeOwnership ---
// Tests for the per-file ownership metrics (bus-factor, primary owner).
describe('computeOwnership', () => {
  it('computes bus-factor of 1 for a solo author', () => {
    const metrics = computeOwnership({ Alice: 10 });
    expect(metrics.busFactor).toBe(1);
    expect(metrics.totalCommits).toBe(10);
    expect(metrics.primaryOwner.name).toBe('Alice');
    expect(metrics.primaryOwner.pct).toBe(1);
  });
  it('computes bus-factor of 2 when two authors each own >= 10%', () => {
    expect(computeOwnership({ Alice: 8, Bob: 2 }).busFactor).toBe(2);
  });
  it('does not count authors below the threshold', () => {
    // Bob owns 5% of commits — under the default 10% cutoff.
    expect(computeOwnership({ Alice: 19, Bob: 1 }).busFactor).toBe(1);
  });
  it('respects a custom ownership threshold', () => {
    // Bob owns exactly 10%, but the custom threshold is 20%.
    expect(computeOwnership({ Alice: 9, Bob: 1 }, 0.2).busFactor).toBe(1);
  });
  it('sorts authors by commit count descending', () => {
    const { authors } = computeOwnership({ Alice: 3, Bob: 7, Carol: 5 });
    expect(authors[0].name).toBe('Bob');
    expect(authors[1].name).toBe('Carol');
    expect(authors[2].name).toBe('Alice');
  });
  it('handles empty author counts gracefully', () => {
    const metrics = computeOwnership({});
    expect(metrics.totalCommits).toBe(0);
    expect(metrics.busFactor).toBe(0);
    expect(metrics.primaryOwner).toBeNull();
  });
});
// --- scoreFiles ---
// Tests for repo-wide scoring: minCommits filtering and risk ordering.
describe('scoreFiles', () => {
  // risk.js: Bob holds <10%, so bus-factor 1. shared.js: 50/50, bus-factor 2.
  // tiny.js: one commit, dropped at minCommits=2.
  const ownership = {
    'server/src/risk.js': { Alice: 9, Bob: 1 },
    'server/src/shared.js': { Alice: 5, Bob: 5 },
    'server/src/tiny.js': { Alice: 1 },
  };
  it('filters files below minCommits', () => {
    const scored = scoreFiles(ownership, { minCommits: 2 });
    expect(scored.find((entry) => entry.file === 'server/src/tiny.js')).toBeUndefined();
  });
  it('includes files at or above minCommits', () => {
    const names = scoreFiles(ownership, { minCommits: 2 }).map((entry) => entry.file);
    expect(names).toContain('server/src/risk.js');
    expect(names).toContain('server/src/shared.js');
  });
  it('sorts lowest bus-factor first', () => {
    const scored = scoreFiles(ownership, { minCommits: 2 });
    expect(scored[0].file).toBe('server/src/risk.js');
    expect(scored[1].file).toBe('server/src/shared.js');
  });
  it('returns empty array for empty ownership', () => {
    expect(scoreFiles({}, {})).toEqual([]);
  });
});
// --- repoStats ---
// Tests for the aggregate stats derived from scored files.
describe('repoStats', () => {
  it('returns zeros for empty input', () => {
    const stats = repoStats([]);
    expect(stats.avgBusFactor).toBe(0);
    expect(stats.highRiskCount).toBe(0);
    expect(stats.totalFiles).toBe(0);
  });
  it('counts high-risk files (busFactor === 1)', () => {
    const scored = [
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 2, totalCommits: 5, authors: [] },
      { busFactor: 1, totalCommits: 3, authors: [] },
    ];
    const stats = repoStats(scored);
    expect(stats.highRiskCount).toBe(2);
    expect(stats.totalFiles).toBe(3);
  });
  it('computes weighted average bus-factor', () => {
    // (1*10 + 3*10) / 20 = 2
    const stats = repoStats([
      { busFactor: 1, totalCommits: 10, authors: [] },
      { busFactor: 3, totalCommits: 10, authors: [] },
    ]);
    expect(stats.avgBusFactor).toBe(2);
  });
});

1161
scripts/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +0,0 @@
{
"name": "budget-scripts",
"version": "1.0.0",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"bus-factor": "node bus-factor.mjs"
},
"devDependencies": {
"vitest": "^4.1.0"
}
}

View File

@@ -1,8 +0,0 @@
import { defineConfig } from 'vitest/config';
// Vitest config for the scripts workspace: Node environment with global
// test APIs (describe/it/expect) so test files need no explicit imports.
export default defineConfig({
test: {
globals: true,
environment: 'node',
},
});

View File

@@ -11,6 +11,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"express": "^4.19.2", "express": "^4.19.2",
"helmet": "^8.1.0",
"pg": "^8.11.5" "pg": "^8.11.5"
}, },
"devDependencies": { "devDependencies": {
@@ -1282,6 +1283,15 @@
"node": ">= 0.4" "node": ">= 0.4"
} }
}, },
"node_modules/helmet": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/helmet/-/helmet-8.1.0.tgz",
"integrity": "sha512-jOiHyAZsmnr8LqoPGmCjYAaiuWwjAPLgY8ZX2XrmHawt99/u1y6RgrZMTeoPfpUbV96HOalYgz1qzkRbw54Pmg==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/http-errors": { "node_modules/http-errors": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",

View File

@@ -12,6 +12,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"express": "^4.19.2", "express": "^4.19.2",
"helmet": "^8.1.0",
"pg": "^8.11.5" "pg": "^8.11.5"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -131,3 +131,35 @@ describe('PATCH /api/bills/:id/toggle', () => {
expect(res.body).toEqual(toggled); expect(res.body).toEqual(toggled);
}); });
}); });
describe('ID validation — bills routes', () => {
beforeEach(() => {
db.pool.query.mockReset();
});
it('GET /api/bills/:id returns 400 for non-numeric id', async () => {
const res = await request(app).get('/api/bills/abc');
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
it('PUT /api/bills/:id returns 400 for non-numeric id', async () => {
const res = await request(app)
.put('/api/bills/abc')
.send({ name: 'X', amount: 10, due_day: 1, assigned_paycheck: 1 });
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
it('DELETE /api/bills/:id returns 400 for non-numeric id', async () => {
const res = await request(app).delete('/api/bills/abc');
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
it('PATCH /api/bills/:id/toggle returns 400 for non-numeric id', async () => {
const res = await request(app).patch('/api/bills/abc/toggle');
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
});

View File

@@ -338,4 +338,39 @@ describe('PATCH /api/financing-payments/:id/paid', () => {
expect(res.status).toBe(404); expect(res.status).toBe(404);
expect(res.body).toEqual({ error: 'Payment not found' }); expect(res.body).toEqual({ error: 'Payment not found' });
}); });
it('returns 400 for non-numeric payment id', async () => {
const res = await request(app)
.patch('/api/financing-payments/abc/paid')
.send({ paid: true });
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
});
describe('ID validation — financing routes', () => {
beforeEach(() => {
vi.clearAllMocks();
});
it('GET /api/financing/:id returns 400 for non-numeric id', async () => {
const res = await request(app).get('/api/financing/abc');
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
it('PUT /api/financing/:id returns 400 for non-numeric id', async () => {
const res = await request(app)
.put('/api/financing/abc')
.send({ name: 'X', total_amount: 100, due_date: '2027-01-01' });
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
it('DELETE /api/financing/:id returns 400 for non-numeric id', async () => {
const res = await request(app).delete('/api/financing/abc');
expect(res.status).toBe(400);
expect(res.body).toEqual({ error: 'Invalid id' });
});
}); });

View File

@@ -1,5 +1,6 @@
const express = require('express'); const express = require('express');
const cors = require('cors'); const cors = require('cors');
const helmet = require('helmet');
const path = require('path'); const path = require('path');
const healthRouter = require('./routes/health'); const healthRouter = require('./routes/health');
const configRouter = require('./routes/config'); const configRouter = require('./routes/config');
@@ -12,8 +13,20 @@ const { router: financingRouter } = require('./routes/financing');
const app = express(); const app = express();
app.use(cors()); const allowedOrigin = process.env.ALLOWED_ORIGIN || 'http://localhost:5173';
app.use(express.json()); app.use(cors({ origin: allowedOrigin }));
app.use(helmet({
contentSecurityPolicy: {
directives: {
defaultSrc: ["'self'"],
scriptSrc: ["'self'"],
styleSrc: ["'self'", "'unsafe-inline'"],
imgSrc: ["'self'", 'data:'],
connectSrc: ["'self'"],
},
},
}));
app.use(express.json({ limit: '1mb' }));
// API routes // API routes
app.use('/api', healthRouter); app.use('/api', healthRouter);

View File

@@ -85,8 +85,10 @@ router.post('/bills', async (req, res) => {
// GET /api/bills/:id — get single bill // GET /api/bills/:id — get single bill
router.get('/bills/:id', async (req, res) => { router.get('/bills/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
try { try {
const result = await pool.query('SELECT * FROM bills WHERE id = $1', [req.params.id]); const result = await pool.query('SELECT * FROM bills WHERE id = $1', [id]);
if (result.rows.length === 0) { if (result.rows.length === 0) {
return res.status(404).json({ error: 'Bill not found' }); return res.status(404).json({ error: 'Bill not found' });
} }
@@ -99,6 +101,9 @@ router.get('/bills/:id', async (req, res) => {
// PUT /api/bills/:id — update bill // PUT /api/bills/:id — update bill
router.put('/bills/:id', async (req, res) => { router.put('/bills/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
const validationError = validateBillFields(req.body); const validationError = validateBillFields(req.body);
if (validationError) { if (validationError) {
return res.status(400).json({ error: validationError }); return res.status(400).json({ error: validationError });
@@ -129,7 +134,7 @@ router.put('/bills/:id', async (req, res) => {
category || 'General', category || 'General',
active !== undefined ? active : true, active !== undefined ? active : true,
Boolean(variable_amount), Boolean(variable_amount),
req.params.id, id,
] ]
); );
if (result.rows.length === 0) { if (result.rows.length === 0) {
@@ -144,10 +149,12 @@ router.put('/bills/:id', async (req, res) => {
// DELETE /api/bills/:id — hard delete // DELETE /api/bills/:id — hard delete
router.delete('/bills/:id', async (req, res) => { router.delete('/bills/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
try { try {
const result = await pool.query( const result = await pool.query(
'DELETE FROM bills WHERE id = $1 RETURNING id', 'DELETE FROM bills WHERE id = $1 RETURNING id',
[req.params.id] [id]
); );
if (result.rows.length === 0) { if (result.rows.length === 0) {
return res.status(404).json({ error: 'Bill not found' }); return res.status(404).json({ error: 'Bill not found' });
@@ -161,10 +168,12 @@ router.delete('/bills/:id', async (req, res) => {
// PATCH /api/bills/:id/toggle — toggle active field // PATCH /api/bills/:id/toggle — toggle active field
router.patch('/bills/:id/toggle', async (req, res) => { router.patch('/bills/:id/toggle', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
try { try {
const result = await pool.query( const result = await pool.query(
'UPDATE bills SET active = NOT active WHERE id = $1 RETURNING *', 'UPDATE bills SET active = NOT active WHERE id = $1 RETURNING *',
[req.params.id] [id]
); );
if (result.rows.length === 0) { if (result.rows.length === 0) {
return res.status(404).json({ error: 'Bill not found' }); return res.status(404).json({ error: 'Bill not found' });

View File

@@ -109,9 +109,11 @@ router.post('/financing', async (req, res) => {
// GET /api/financing/:id // GET /api/financing/:id
router.get('/financing/:id', async (req, res) => { router.get('/financing/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
try { try {
const { rows } = await pool.query( const { rows } = await pool.query(
'SELECT * FROM financing_plans WHERE id = $1', [req.params.id] 'SELECT * FROM financing_plans WHERE id = $1', [id]
); );
if (!rows.length) return res.status(404).json({ error: 'Not found' }); if (!rows.length) return res.status(404).json({ error: 'Not found' });
@@ -136,6 +138,9 @@ router.get('/financing/:id', async (req, res) => {
// PUT /api/financing/:id // PUT /api/financing/:id
router.put('/financing/:id', async (req, res) => { router.put('/financing/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
const { name, total_amount, due_date, assigned_paycheck, start_date } = req.body; const { name, total_amount, due_date, assigned_paycheck, start_date } = req.body;
if (!name || !total_amount || !due_date) { if (!name || !total_amount || !due_date) {
return res.status(400).json({ error: 'name, total_amount, and due_date are required' }); return res.status(400).json({ error: 'name, total_amount, and due_date are required' });
@@ -145,7 +150,7 @@ router.put('/financing/:id', async (req, res) => {
const { rows } = await pool.query( const { rows } = await pool.query(
`UPDATE financing_plans SET name=$1, total_amount=$2, due_date=$3, assigned_paycheck=$4, start_date=$5 `UPDATE financing_plans SET name=$1, total_amount=$2, due_date=$3, assigned_paycheck=$4, start_date=$5
WHERE id=$6 RETURNING *`, WHERE id=$6 RETURNING *`,
[name.trim(), parseFloat(total_amount), due_date, assigned_paycheck ?? null, start_date || new Date().toISOString().slice(0, 10), req.params.id] [name.trim(), parseFloat(total_amount), due_date, assigned_paycheck ?? null, start_date || new Date().toISOString().slice(0, 10), id]
); );
if (!rows.length) return res.status(404).json({ error: 'Not found' }); if (!rows.length) return res.status(404).json({ error: 'Not found' });
res.json(await enrichPlan(pool, rows[0])); res.json(await enrichPlan(pool, rows[0]));
@@ -157,9 +162,11 @@ router.put('/financing/:id', async (req, res) => {
// DELETE /api/financing/:id // DELETE /api/financing/:id
router.delete('/financing/:id', async (req, res) => { router.delete('/financing/:id', async (req, res) => {
const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
try { try {
const { rows } = await pool.query( const { rows } = await pool.query(
'DELETE FROM financing_plans WHERE id=$1 RETURNING id', [req.params.id] 'DELETE FROM financing_plans WHERE id=$1 RETURNING id', [id]
); );
if (!rows.length) return res.status(404).json({ error: 'Not found' }); if (!rows.length) return res.status(404).json({ error: 'Not found' });
res.json({ deleted: true }); res.json({ deleted: true });
@@ -172,6 +179,7 @@ router.delete('/financing/:id', async (req, res) => {
// PATCH /api/financing-payments/:id/paid // PATCH /api/financing-payments/:id/paid
router.patch('/financing-payments/:id/paid', async (req, res) => { router.patch('/financing-payments/:id/paid', async (req, res) => {
const id = parseInt(req.params.id, 10); const id = parseInt(req.params.id, 10);
if (isNaN(id)) return res.status(400).json({ error: 'Invalid id' });
const { paid } = req.body; const { paid } = req.body;
if (typeof paid !== 'boolean') { if (typeof paid !== 'boolean') {
return res.status(400).json({ error: 'paid must be a boolean' }); return res.status(400).json({ error: 'paid must be a boolean' });