import { isProKey } from "../services/keys.js";
import logger from "../services/logger.js";
import { queryWithRetry, connectWithRetry } from "../services/db.js";

// Per-tier monthly request quotas.
const FREE_TIER_LIMIT = 100;
const PRO_TIER_LIMIT = 5000;

// In-memory cache, periodically synced to PostgreSQL.
// Map<apiKey, { count: number, monthKey: "YYYY-MM" }>
let usage = new Map();

// Write-behind buffer for batching DB writes (Audit #10)
const dirtyKeys = new Set();
const retryCount = new Map();
const MAX_RETRIES = 3;
const FLUSH_INTERVAL_MS = 5000;
const FLUSH_THRESHOLD = 50;

// Single in-flight flush promise; prevents the interval timer, the
// threshold trigger, and the shutdown handlers from flushing concurrently
// and racing on dirtyKeys.
let flushInFlight = null;

/**
 * Current month bucket, e.g. "2024-07".
 * NOTE(review): uses the server's local time zone — confirm whether quota
 * months should instead roll over in UTC.
 */
function getMonthKey() {
  const d = new Date();
  return `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, "0")}`;
}

/**
 * Populate the in-memory usage cache from PostgreSQL.
 * On failure the cache starts empty; the underlying error is logged so a
 * real outage is distinguishable from a fresh install (previously both
 * produced the identical "starting fresh" message with no error detail).
 */
export async function loadUsageData() {
  try {
    const result = await queryWithRetry("SELECT key, count, month_key FROM usage");
    usage = new Map();
    for (const row of result.rows) {
      usage.set(row.key, { count: row.count, monthKey: row.month_key });
    }
    logger.info(`Loaded usage data for ${usage.size} keys from PostgreSQL`);
  } catch (error) {
    logger.warn({ err: error }, "No existing usage data found, starting fresh");
    usage = new Map();
  }
}

/**
 * Batch-flush dirty usage entries to PostgreSQL (Audit #10 + #12).
 * Returns the shared in-flight promise if a flush is already running.
 *
 * Fixes over the previous version:
 *  - Each upsert runs under a SAVEPOINT. A failed statement aborts a
 *    PostgreSQL transaction; without savepoints one bad row made every
 *    subsequent upsert fail with "current transaction is aborted", so the
 *    per-key retry logic never actually worked.
 *  - Keys are removed from the dirty set only AFTER COMMIT succeeds. The
 *    old code deleted them per-row, so a failed COMMIT silently dropped
 *    updates despite the "keep all keys dirty" intent.
 */
function flushDirtyEntries() {
  if (flushInFlight) return flushInFlight;
  if (dirtyKeys.size === 0) return Promise.resolve();
  flushInFlight = doFlush().finally(() => {
    flushInFlight = null;
  });
  return flushInFlight;
}

// Internal worker for flushDirtyEntries; never call directly.
async function doFlush() {
  const keysToFlush = [...dirtyKeys];
  // Keys whose outcome is settled (written, or permanently given up on);
  // cleared from the dirty set only once the transaction commits.
  const settled = [];
  const client = await connectWithRetry();
  try {
    await client.query("BEGIN");
    for (const key of keysToFlush) {
      const record = usage.get(key);
      if (!record) {
        settled.push(key); // nothing to write; drop it from the buffer
        continue;
      }
      // SAVEPOINT isolates this row: a failure rolls back only this upsert,
      // leaving the surrounding transaction usable for the remaining keys.
      await client.query("SAVEPOINT flush_row");
      try {
        await client.query(
          `INSERT INTO usage (key, count, month_key) VALUES ($1, $2, $3)
           ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
          [key, record.count, record.monthKey]
        );
        await client.query("RELEASE SAVEPOINT flush_row");
        settled.push(key);
      } catch (error) {
        await client.query("ROLLBACK TO SAVEPOINT flush_row");
        // Audit #12: bounded per-key retry for failed writes.
        const retries = (retryCount.get(key) || 0) + 1;
        if (retries >= MAX_RETRIES) {
          logger.error(
            { key: key.slice(0, 8) + "...", retries },
            "CRITICAL: Usage write failed after max retries, data may diverge"
          );
          settled.push(key); // give up: stop retrying this key
          retryCount.delete(key);
        } else {
          retryCount.set(key, retries);
          logger.warn(
            { key: key.slice(0, 8) + "...", retries },
            "Usage write failed, will retry"
          );
        }
      }
    }
    await client.query("COMMIT");
    // Data is durable only now; clear the settled keys from the buffer.
    for (const key of settled) {
      dirtyKeys.delete(key);
      retryCount.delete(key);
    }
  } catch (error) {
    await client.query("ROLLBACK").catch(() => { });
    logger.error({ err: error }, "Failed to flush usage batch");
    // All keys remain dirty and are retried on the next flush.
  } finally {
    client.release();
  }
}

// Periodic flush. unref() so the timer alone does not keep the process
// alive; .catch() so a connection failure cannot become an unhandled
// rejection (the original passed the async fn straight to setInterval).
const flushTimer = setInterval(() => {
  flushDirtyEntries().catch((err) => logger.error({ err }, "Periodic flush failed"));
}, FLUSH_INTERVAL_MS);
flushTimer.unref?.();

// Best-effort flush on process exit.
// NOTE(review): these handlers never call process.exit(), so something else
// in the app must terminate the process — and the async flush may be cut
// short if it exits first. Confirm the shutdown orchestration.
process.on("SIGTERM", () => {
  flushDirtyEntries().catch(() => { });
});
process.on("SIGINT", () => {
  flushDirtyEntries().catch(() => { });
});

/**
 * Express middleware enforcing per-key monthly quotas.
 *
 * Expects `req.apiKeyInfo` to have been set by upstream auth middleware;
 * falls back to the literal key "unknown" when absent. Responds 429 when
 * the key's tier limit for the current month is reached; otherwise records
 * the request and calls next().
 *
 * (The pro and free branches were previously duplicated verbatim except
 * for the limit and the error message; consolidated into one path.)
 */
export function usageMiddleware(req, res, next) {
  const key = req.apiKeyInfo?.key || "unknown";
  const monthKey = getMonthKey();
  const pro = isProKey(key);
  const limit = pro ? PRO_TIER_LIMIT : FREE_TIER_LIMIT;

  const record = usage.get(key);
  // A record from a previous month does not count against this month's quota.
  if (record && record.monthKey === monthKey && record.count >= limit) {
    res.status(429).json({
      error: pro
        ? "Pro tier limit reached (5,000/month). Contact support for higher limits."
        : "Free tier limit reached (100/month). Upgrade to Pro at https://docfast.dev/#pricing for 5,000 PDFs/month.",
    });
    return;
  }
  trackUsage(key, monthKey);
  next();
}

/**
 * Record one request for `key` in `monthKey` and mark the key dirty for the
 * next DB flush. A month rollover resets the counter to 1.
 */
function trackUsage(key, monthKey) {
  const record = usage.get(key);
  if (!record || record.monthKey !== monthKey) {
    usage.set(key, { count: 1, monthKey });
  } else {
    record.count++;
  }
  dirtyKeys.add(key);
  // Flush eagerly once enough entries accumulate instead of waiting
  // for the interval timer.
  if (dirtyKeys.size >= FLUSH_THRESHOLD) {
    flushDirtyEntries().catch((err) => logger.error({ err }, "Threshold flush failed"));
  }
}

/**
 * Usage stats for a single API key, keyed by the masked key (first 8 chars
 * + "..."). Returns an empty object when the key is absent or unknown.
 */
export function getUsageStats(apiKey) {
  const stats = {};
  if (apiKey) {
    const record = usage.get(apiKey);
    if (record) {
      const masked = apiKey.slice(0, 8) + "...";
      stats[masked] = { count: record.count, month: record.monthKey };
    }
  }
  return stats;
}