fix: audit #18 rate limit cleanup (.unref), audit #25 consistent error shapes
All checks were successful
Deploy to Production / Deploy to Server (push) Successful in 1m4s

Audit #18 - Rate limit store memory growth:
- rateLimitStore already had cleanup via cleanupExpiredEntries() per-request + 60s interval
- Added .unref() to the setInterval timer for clean graceful shutdown behaviour

Audit #25 - Consistent error response shapes:
- billing.ts: Fixed 409 plain-text response -> JSON { error: "..." }
- index.ts: Simplified 404 from 4-field object to { error: "Not Found: METHOD path" }
- signup.ts: Removed extra retryAfter field from rate-limit message object
- pdfRateLimit.ts: Merged limit/tier/retryAfter into single error message string
- usage.ts: Merged limit/used/upgrade fields into single error message string
- convert.ts: Merged detail field into error message (3 occurrences)

All error responses now consistently use {"error": "message"} shape.
This commit is contained in:
DocFast Agent 2026-02-17 08:10:14 +00:00
parent e7d28bc62b
commit a0d4ba964c
12 changed files with 90 additions and 71 deletions

View file

@ -1,4 +1,5 @@
import { isProKey } from "../services/keys.js";
import logger from "../services/logger.js";
// Per-key rate limits (requests per minute)
const FREE_RATE_LIMIT = 10; // free-tier keys: 10 PDF requests per window
const PRO_RATE_LIMIT = 30; // pro-tier keys: 30 PDF requests per window
@ -6,6 +7,8 @@ const RATE_WINDOW_MS = 60_000; // 1 minute
// Concurrency limits
const MAX_CONCURRENT_PDFS = 3; // hard cap on simultaneously rendering PDFs
const MAX_QUEUE_SIZE = 10; // global cap on callers waiting for a free slot
// Per-key queue fairness (Audit #15)
const MAX_QUEUED_PER_KEY = 3; // one API key may hold at most 3 queue entries
// Mutable module state
const rateLimitStore = new Map(); // apiKey -> { count, resetTime } window entry
let activePdfCount = 0; // PDFs currently holding a concurrency slot
const pdfQueue = []; // waiters: { resolve, reject, apiKey } objects
@ -26,7 +29,6 @@ function checkRateLimit(apiKey) {
const limit = getRateLimit(apiKey);
const entry = rateLimitStore.get(apiKey);
if (!entry || now >= entry.resetTime) {
// Create new window
rateLimitStore.set(apiKey, {
count: 1,
resetTime: now + RATE_WINDOW_MS
@ -39,7 +41,10 @@ function checkRateLimit(apiKey) {
entry.count++;
return true;
}
async function acquireConcurrencySlot() {
/**
 * Count how many queued slot-waiters belong to the given API key.
 * Used by the per-key fairness check (Audit #15).
 *
 * @param {string} apiKey - the key whose queued waiters are counted
 * @returns {number} number of entries in pdfQueue tagged with this key
 */
function getQueuedCountForKey(apiKey) {
  let queued = 0;
  for (const waiter of pdfQueue) {
    if (waiter.apiKey === apiKey) {
      queued += 1;
    }
  }
  return queued;
}
/**
 * Acquire a slot for PDF generation, enforcing both the global concurrency
 * cap and per-key queue fairness (Audit #15).
 *
 * Fix: this span contained diff artifacts — an embedded hunk header and the
 * stale pre-change `pdfQueue.push({ resolve, reject })` line duplicated next
 * to the post-change push. This is the clean post-change function.
 *
 * @param {string} apiKey - key of the requester; used for fair queueing
 * @returns {Promise<void>} resolves once a slot is held by the caller
 * @throws {Error} "QUEUE_FULL" when the global queue is full, or when this
 *   key already has MAX_QUEUED_PER_KEY entries queued
 */
async function acquireConcurrencySlot(apiKey) {
  // Fast path: a slot is free — claim it immediately.
  if (activePdfCount < MAX_CONCURRENT_PDFS) {
    activePdfCount++;
    return;
  }
  // Global backpressure: refuse once the wait queue is full.
  if (pdfQueue.length >= MAX_QUEUE_SIZE) {
    throw new Error("QUEUE_FULL");
  }
  // Audit #15: Per-key fairness — reject if this key already has too many queued
  if (getQueuedCountForKey(apiKey) >= MAX_QUEUED_PER_KEY) {
    logger.warn({ apiKey: apiKey.slice(0, 8) + "..." }, "Per-key queue limit reached");
    throw new Error("QUEUE_FULL");
  }
  // Park the caller; releaseConcurrencySlot() is expected to settle this
  // waiter when a slot frees up (its body is outside this view — confirm).
  return new Promise((resolve, reject) => {
    pdfQueue.push({ resolve, reject, apiKey });
  });
}
function releaseConcurrencySlot() {
@ -66,16 +76,11 @@ export function pdfRateLimitMiddleware(req, res, next) {
if (!checkRateLimit(apiKey)) {
const limit = getRateLimit(apiKey);
const tier = isProKey(apiKey) ? "pro" : "free";
res.status(429).json({
error: "Rate limit exceeded",
limit: `${limit} PDFs per minute`,
tier,
retryAfter: "60 seconds"
});
res.status(429).json({ error: `Rate limit exceeded: ${limit} PDFs/min allowed for ${tier} tier. Retry after 60s.` });
return;
}
// Add concurrency control to the request
req.acquirePdfSlot = acquireConcurrencySlot;
// Add concurrency control to the request (pass apiKey for fairness)
req.acquirePdfSlot = () => acquireConcurrencySlot(apiKey);
req.releasePdfSlot = releaseConcurrencySlot;
next();
}
@ -88,4 +93,4 @@ export function getConcurrencyStats() {
};
}
// Proactive cleanup every 60s. The stale pre-change line (no .unref()) was
// duplicated here by the diff rendering; keeping both would register the
// interval twice. .unref() lets the process exit during graceful shutdown
// without this timer holding it open (Audit #18).
setInterval(cleanupExpiredEntries, 60_000).unref();