Backend hardening: structured logging, timeouts, memory leak fixes, compression, XSS fix
Some checks failed
Deploy to Production / Deploy to Server (push) Failing after 20s
- Add pino structured logging with request IDs (X-Request-Id header)
- Add 30s timeout to acquirePage() and renderPdf/renderUrlPdf
- Add verification cache cleanup (every 15 min) and rate-limit cleanup (every 60 s)
- Read version from package.json in health endpoint
- Add compression middleware
- Escape currency in templates (XSS fix)
- Add static asset caching (1 h maxAge)
- Remove deprecated docker-compose version field
- Replace all console.log/console.error calls with the pino logger
This commit is contained in:
parent
4833edf44c
commit
9541ae1826
20 changed files with 319 additions and 74 deletions
|
|
@ -1,4 +1,5 @@
|
|||
import puppeteer, { Browser, Page } from "puppeteer";
|
||||
import logger from "./logger.js";
|
||||
|
||||
const BROWSER_COUNT = parseInt(process.env.BROWSER_COUNT || "2", 10);
|
||||
const PAGES_PER_BROWSER = parseInt(process.env.PAGES_PER_BROWSER || "8", 10);
|
||||
|
|
@ -90,9 +91,19 @@ async function acquirePage(): Promise<{ page: Page; instance: BrowserInstance }>
|
|||
return { page, instance: inst };
|
||||
}
|
||||
|
||||
// All pages busy, queue
|
||||
return new Promise((resolve) => {
|
||||
waitingQueue.push({ resolve });
|
||||
// All pages busy, queue with 30s timeout
|
||||
return new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(() => {
|
||||
const idx = waitingQueue.findIndex((w) => w.resolve === resolve);
|
||||
if (idx >= 0) waitingQueue.splice(idx, 1);
|
||||
reject(new Error("QUEUE_FULL"));
|
||||
}, 30_000);
|
||||
waitingQueue.push({
|
||||
resolve: (v) => {
|
||||
clearTimeout(timer);
|
||||
resolve(v);
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -125,7 +136,7 @@ function releasePage(page: Page, inst: BrowserInstance): void {
|
|||
async function scheduleRestart(inst: BrowserInstance): Promise<void> {
|
||||
if (inst.restarting) return;
|
||||
inst.restarting = true;
|
||||
console.log(`Scheduling browser ${inst.id} restart (pdfs=${inst.pdfCount}, uptime=${Math.round((Date.now() - inst.lastRestartTime) / 1000)}s)`);
|
||||
logger.info(`Scheduling browser ${inst.id} restart (pdfs=${inst.pdfCount}, uptime=${Math.round((Date.now() - inst.lastRestartTime) / 1000)}s)`);
|
||||
|
||||
const drainCheck = () => new Promise<void>((resolve) => {
|
||||
const check = () => {
|
||||
|
|
@ -159,7 +170,7 @@ async function scheduleRestart(inst: BrowserInstance): Promise<void> {
|
|||
inst.pdfCount = 0;
|
||||
inst.lastRestartTime = Date.now();
|
||||
inst.restarting = false;
|
||||
console.log(`Browser ${inst.id} restarted successfully`);
|
||||
logger.info(`Browser ${inst.id} restarted successfully`);
|
||||
|
||||
while (waitingQueue.length > 0 && inst.availablePages.length > 0) {
|
||||
const waiter = waitingQueue.shift();
|
||||
|
|
@ -193,7 +204,7 @@ export async function initBrowser(): Promise<void> {
|
|||
const inst = await launchInstance(i);
|
||||
instances.push(inst);
|
||||
}
|
||||
console.log(`Browser pool ready (${BROWSER_COUNT} browsers × ${PAGES_PER_BROWSER} pages = ${BROWSER_COUNT * PAGES_PER_BROWSER} total)`);
|
||||
logger.info(`Browser pool ready (${BROWSER_COUNT} browsers × ${PAGES_PER_BROWSER} pages = ${BROWSER_COUNT * PAGES_PER_BROWSER} total)`);
|
||||
}
|
||||
|
||||
export async function closeBrowser(): Promise<void> {
|
||||
|
|
@ -221,20 +232,26 @@ export async function renderPdf(
|
|||
): Promise<Buffer> {
|
||||
const { page, instance } = await acquirePage();
|
||||
try {
|
||||
await page.setContent(html, { waitUntil: "domcontentloaded", timeout: 15_000 });
|
||||
await page.addStyleTag({ content: "* { margin: 0; padding: 0; } body { margin: 0; }" });
|
||||
|
||||
const pdf = await page.pdf({
|
||||
format: (options.format as any) || "A4",
|
||||
landscape: options.landscape || false,
|
||||
printBackground: options.printBackground !== false,
|
||||
margin: options.margin || { top: "0", right: "0", bottom: "0", left: "0" },
|
||||
headerTemplate: options.headerTemplate,
|
||||
footerTemplate: options.footerTemplate,
|
||||
displayHeaderFooter: options.displayHeaderFooter || false,
|
||||
});
|
||||
|
||||
return Buffer.from(pdf);
|
||||
const result = await Promise.race([
|
||||
(async () => {
|
||||
await page.setContent(html, { waitUntil: "domcontentloaded", timeout: 15_000 });
|
||||
await page.addStyleTag({ content: "* { margin: 0; padding: 0; } body { margin: 0; }" });
|
||||
const pdf = await page.pdf({
|
||||
format: (options.format as any) || "A4",
|
||||
landscape: options.landscape || false,
|
||||
printBackground: options.printBackground !== false,
|
||||
margin: options.margin || { top: "0", right: "0", bottom: "0", left: "0" },
|
||||
headerTemplate: options.headerTemplate,
|
||||
footerTemplate: options.footerTemplate,
|
||||
displayHeaderFooter: options.displayHeaderFooter || false,
|
||||
});
|
||||
return Buffer.from(pdf);
|
||||
})(),
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error("PDF_TIMEOUT")), 30_000)
|
||||
),
|
||||
]);
|
||||
return result;
|
||||
} finally {
|
||||
releasePage(page, instance);
|
||||
}
|
||||
|
|
@ -252,19 +269,25 @@ export async function renderUrlPdf(
|
|||
): Promise<Buffer> {
|
||||
const { page, instance } = await acquirePage();
|
||||
try {
|
||||
await page.goto(url, {
|
||||
waitUntil: (options.waitUntil as any) || "networkidle0",
|
||||
timeout: 30_000,
|
||||
});
|
||||
|
||||
const pdf = await page.pdf({
|
||||
format: (options.format as any) || "A4",
|
||||
landscape: options.landscape || false,
|
||||
printBackground: options.printBackground !== false,
|
||||
margin: options.margin || { top: "0", right: "0", bottom: "0", left: "0" },
|
||||
});
|
||||
|
||||
return Buffer.from(pdf);
|
||||
const result = await Promise.race([
|
||||
(async () => {
|
||||
await page.goto(url, {
|
||||
waitUntil: (options.waitUntil as any) || "networkidle0",
|
||||
timeout: 30_000,
|
||||
});
|
||||
const pdf = await page.pdf({
|
||||
format: (options.format as any) || "A4",
|
||||
landscape: options.landscape || false,
|
||||
printBackground: options.printBackground !== false,
|
||||
margin: options.margin || { top: "0", right: "0", bottom: "0", left: "0" },
|
||||
});
|
||||
return Buffer.from(pdf);
|
||||
})(),
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error("PDF_TIMEOUT")), 30_000)
|
||||
),
|
||||
]);
|
||||
return result;
|
||||
} finally {
|
||||
releasePage(page, instance);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import pg from "pg";
|
||||
|
||||
import logger from "./logger.js";
|
||||
const { Pool } = pg;
|
||||
|
||||
const pool = new Pool({
|
||||
|
|
@ -13,7 +14,7 @@ const pool = new Pool({
|
|||
});
|
||||
|
||||
pool.on("error", (err) => {
|
||||
console.error("Unexpected PostgreSQL pool error:", err);
|
||||
logger.error({ err }, "Unexpected PostgreSQL pool error");
|
||||
});
|
||||
|
||||
export async function initDatabase(): Promise<void> {
|
||||
|
|
@ -55,7 +56,7 @@ export async function initDatabase(): Promise<void> {
|
|||
month_key TEXT NOT NULL
|
||||
);
|
||||
`);
|
||||
console.log("PostgreSQL tables initialized");
|
||||
logger.info("PostgreSQL tables initialized");
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import nodemailer from "nodemailer";
|
||||
import logger from "./logger.js";
|
||||
|
||||
const transporter = nodemailer.createTransport({
|
||||
host: process.env.SMTP_HOST || "host.docker.internal",
|
||||
|
|
@ -18,10 +19,10 @@ export async function sendVerificationEmail(email: string, code: string): Promis
|
|||
subject: "DocFast - Verify your email",
|
||||
text: `Your DocFast verification code is: ${code}\n\nThis code expires in 15 minutes.\n\nIf you didn't request this, ignore this email.`,
|
||||
});
|
||||
console.log(`📧 Verification email sent to ${email}: ${info.messageId}`);
|
||||
logger.info({ email, messageId: info.messageId }, "Verification email sent");
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error(`📧 Failed to send verification email to ${email}:`, err);
|
||||
logger.error({ err, email }, "Failed to send verification email");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { randomBytes } from "crypto";
|
||||
import logger from "./logger.js";
|
||||
import pool from "./db.js";
|
||||
|
||||
export interface ApiKey {
|
||||
|
|
@ -25,7 +26,7 @@ export async function loadKeys(): Promise<void> {
|
|||
stripeCustomerId: r.stripe_customer_id || undefined,
|
||||
}));
|
||||
} catch (err) {
|
||||
console.error("Failed to load keys from PostgreSQL:", err);
|
||||
logger.error({ err }, "Failed to load keys from PostgreSQL");
|
||||
keysCache = [];
|
||||
}
|
||||
|
||||
|
|
|
|||
10
src/services/logger.ts
Normal file
10
src/services/logger.ts
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
import pino from "pino";
|
||||
|
||||
const logger = pino({
|
||||
level: process.env.LOG_LEVEL || "info",
|
||||
...(process.env.NODE_ENV !== "production" && {
|
||||
transport: { target: "pino/file", options: { destination: 1 } },
|
||||
}),
|
||||
});
|
||||
|
||||
export default logger;
|
||||
|
|
@ -47,7 +47,7 @@ function esc(s: string): string {
|
|||
}
|
||||
|
||||
function renderInvoice(d: any): string {
|
||||
const cur = d.currency || "€";
|
||||
const cur = esc(d.currency || "€");
|
||||
const items = d.items || [];
|
||||
let subtotal = 0;
|
||||
let totalTax = 0;
|
||||
|
|
@ -133,7 +133,7 @@ function renderInvoice(d: any): string {
|
|||
}
|
||||
|
||||
function renderReceipt(d: any): string {
|
||||
const cur = d.currency || "€";
|
||||
const cur = esc(d.currency || "€");
|
||||
const items = d.items || [];
|
||||
let total = 0;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { randomBytes, randomInt } from "crypto";
|
||||
import logger from "./logger.js";
|
||||
import pool from "./db.js";
|
||||
|
||||
export interface Verification {
|
||||
|
|
@ -63,6 +64,17 @@ export async function loadVerifications(): Promise<void> {
|
|||
createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
|
||||
verifiedAt: r.verified_at ? (r.verified_at instanceof Date ? r.verified_at.toISOString() : r.verified_at) : null,
|
||||
}));
|
||||
|
||||
// Cleanup expired entries every 15 minutes
|
||||
setInterval(() => {
|
||||
const cutoff = Date.now() - 24 * 60 * 60 * 1000;
|
||||
const before = verificationsCache.length;
|
||||
verificationsCache = verificationsCache.filter(
|
||||
(v) => v.verifiedAt || new Date(v.createdAt).getTime() > cutoff
|
||||
);
|
||||
const removed = before - verificationsCache.length;
|
||||
if (removed > 0) logger.info({ removed }, "Cleaned expired verification cache entries");
|
||||
}, 15 * 60 * 1000);
|
||||
}
|
||||
|
||||
function verifyTokenSync(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } {
|
||||
|
|
@ -73,7 +85,7 @@ function verifyTokenSync(token: string): { status: "ok"; verification: Verificat
|
|||
if (age > TOKEN_EXPIRY_MS) return { status: "expired" };
|
||||
v.verifiedAt = new Date().toISOString();
|
||||
// Update DB async
|
||||
pool.query("UPDATE verifications SET verified_at = $1 WHERE token = $2", [v.verifiedAt, token]).catch(console.error);
|
||||
pool.query("UPDATE verifications SET verified_at = $1 WHERE token = $2", [v.verifiedAt, token]).catch((err) => logger.error({ err }, "Failed to update verification"));
|
||||
return { status: "ok", verification: v };
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue