diff --git a/docker-compose.yml b/docker-compose.yml index 6b3c4c5..a726688 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,6 +17,11 @@ services: - PRO_KEYS=${PRO_KEYS} - SMTP_HOST=host.docker.internal - SMTP_PORT=25 + - DATABASE_HOST=172.17.0.1 + - DATABASE_PORT=5432 + - DATABASE_NAME=docfast + - DATABASE_USER=docfast + - DATABASE_PASSWORD=${DATABASE_PASSWORD:-docfast} - POOL_SIZE=15 - BROWSER_COUNT=1 - PAGES_PER_BROWSER=15 diff --git a/package-lock.json b/package-lock.json index 1a0073c..a9d9c03 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,6 +14,7 @@ "marked": "^15.0.0", "nanoid": "^5.0.0", "nodemailer": "^8.0.1", + "pg": "^8.13.0", "puppeteer": "^24.0.0", "stripe": "^20.3.1", "swagger-ui-dist": "^5.31.0" @@ -22,6 +23,7 @@ "@types/express": "^5.0.0", "@types/node": "^22.0.0", "@types/nodemailer": "^7.0.9", + "@types/pg": "^8.11.0", "tsx": "^4.19.0", "typescript": "^5.7.0", "vitest": "^3.0.0" @@ -1002,6 +1004,18 @@ "@types/node": "*" } }, + "node_modules/@types/pg": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.16.0.tgz", + "integrity": "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@types/qs": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", @@ -2754,6 +2768,95 @@ "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", "license": "MIT" }, + "node_modules/pg": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.18.0.tgz", + "integrity": "sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.11.0", + "pg-pool": "^3.11.0", + "pg-protocol": "^1.11.0", + "pg-types": 
"2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.11.0.tgz", + "integrity": "sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.11.0.tgz", + "integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.11.0.tgz", + "integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": 
"MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -2821,6 +2924,45 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": 
"^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -3324,6 +3466,15 @@ "node": ">=0.10.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -3914,6 +4065,15 @@ } } }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/package.json b/package.json index 3cc82fc..de045f5 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,8 @@ "nodemailer": "^8.0.1", "puppeteer": "^24.0.0", "stripe": "^20.3.1", - "swagger-ui-dist": "^5.31.0" + "swagger-ui-dist": "^5.31.0", + "pg": "^8.13.0" }, "devDependencies": { "@types/express": "^5.0.0", @@ -26,7 +27,8 @@ "@types/nodemailer": "^7.0.9", "tsx": "^4.19.0", "typescript": "^5.7.0", - "vitest": "^3.0.0" + "vitest": "^3.0.0", + "@types/pg": "^8.11.0" }, "type": "module" -} +} \ No newline at end of file diff --git a/public/index.html b/public/index.html index e2f3ccd..6a4f860 100644 --- a/public/index.html +++ b/public/index.html @@ -44,7 +44,7 @@ nav .container { display: flex; align-items: center; justify-content: space-betw .btn:disabled { opacity: 0.6; cursor: not-allowed; transform: none; } /* Code block */ -.code-section { margin: 56px auto 0; max-width: 660px; text-align: 
left; } +.code-section { margin: 56px auto 0; max-width: 660px; text-align: left; display: flex; flex-direction: column; } .code-header { display: flex; align-items: center; justify-content: space-between; padding: 12px 20px; background: #1a1f2b; border: 1px solid var(--border); border-bottom: none; border-radius: var(--radius) var(--radius) 0 0; } .code-dots { display: flex; gap: 6px; } .code-dots span { width: 10px; height: 10px; border-radius: 50%; } @@ -186,6 +186,9 @@ html, body { .code-section { max-width: calc(100vw - 32px) !important; overflow: hidden !important; + display: flex !important; + flex-direction: column !important; + white-space: normal !important; } .code-block { overflow-x: hidden !important; @@ -201,13 +204,18 @@ html, body { } /* Force any wide elements to fit */ - pre, code, .code-block, .code-section { + pre, code, .code-block { max-width: calc(100vw - 32px) !important; overflow-wrap: break-word !important; word-break: break-all !important; white-space: pre-wrap !important; overflow-x: hidden !important; } + .code-section { + max-width: calc(100vw - 32px) !important; + overflow-x: hidden !important; + white-space: normal !important; + } } /* Recovery modal states */ @@ -277,7 +285,7 @@ html, body {
Avg. generation time
-
99.9%
+
99.5%
Uptime SLA
diff --git a/scripts/migrate-to-postgres.mjs b/scripts/migrate-to-postgres.mjs new file mode 100644 index 0000000..e96aa55 --- /dev/null +++ b/scripts/migrate-to-postgres.mjs @@ -0,0 +1,143 @@ +#!/usr/bin/env node +/** + * Migration script: JSON files → PostgreSQL + * Run on the server where JSON data files exist. + * Usage: DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs + */ +import pg from "pg"; +import { readFileSync, existsSync } from "fs"; + +const { Pool } = pg; + +const pool = new Pool({ + host: process.env.DATABASE_HOST || "127.0.0.1", + port: parseInt(process.env.DATABASE_PORT || "5432", 10), + database: process.env.DATABASE_NAME || "docfast", + user: process.env.DATABASE_USER || "docfast", + password: process.env.DATABASE_PASSWORD || "docfast", +}); + +async function migrate() { + const client = await pool.connect(); + try { + // Create tables + await client.query(` + CREATE TABLE IF NOT EXISTS api_keys ( + key TEXT PRIMARY KEY, + tier TEXT NOT NULL DEFAULT 'free', + email TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + stripe_customer_id TEXT + ); + CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email); + CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id); + + CREATE TABLE IF NOT EXISTS verifications ( + id SERIAL PRIMARY KEY, + email TEXT NOT NULL, + token TEXT NOT NULL UNIQUE, + api_key TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + verified_at TIMESTAMPTZ + ); + CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email); + CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token); + + CREATE TABLE IF NOT EXISTS pending_verifications ( + email TEXT PRIMARY KEY, + code TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ NOT NULL, + attempts INT NOT NULL DEFAULT 0 + ); + + CREATE TABLE IF NOT EXISTS usage ( + key TEXT PRIMARY KEY, + count INT NOT NULL DEFAULT 0, + month_key TEXT NOT 
NULL + ); + `); + console.log("✅ Tables created"); + + // Migrate keys.json + const keysPath = "/opt/docfast/data/keys.json"; + if (existsSync(keysPath)) { + const keysData = JSON.parse(readFileSync(keysPath, "utf-8")); + const keys = keysData.keys || []; + let keyCount = 0; + for (const k of keys) { + await client.query( + `INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id) + VALUES ($1, $2, $3, $4, $5) ON CONFLICT (key) DO NOTHING`, + [k.key, k.tier, k.email || "", k.createdAt, k.stripeCustomerId || null] + ); + keyCount++; + } + console.log(`✅ Migrated ${keyCount} API keys`); + } else { + // Try docker volume path + console.log("⚠️ keys.json not found at", keysPath); + } + + // Migrate verifications.json + const verifPath = "/opt/docfast/data/verifications.json"; + if (existsSync(verifPath)) { + const data = JSON.parse(readFileSync(verifPath, "utf-8")); + const verifications = Array.isArray(data) ? data : (data.verifications || []); + const pending = data.pendingVerifications || []; + + let vCount = 0; + for (const v of verifications) { + await client.query( + `INSERT INTO verifications (email, token, api_key, created_at, verified_at) + VALUES ($1, $2, $3, $4, $5) ON CONFLICT (token) DO NOTHING`, + [v.email, v.token, v.apiKey, v.createdAt, v.verifiedAt || null] + ); + vCount++; + } + console.log(`✅ Migrated ${vCount} verifications`); + + let pCount = 0; + for (const p of pending) { + await client.query( + `INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts) + VALUES ($1, $2, $3, $4, $5) ON CONFLICT (email) DO NOTHING`, + [p.email, p.code, p.createdAt, p.expiresAt, p.attempts] + ); + pCount++; + } + console.log(`✅ Migrated ${pCount} pending verifications`); + } else { + console.log("⚠️ verifications.json not found at", verifPath); + } + + // Migrate usage.json + const usagePath = "/opt/docfast/data/usage.json"; + if (existsSync(usagePath)) { + const usageData = JSON.parse(readFileSync(usagePath, "utf-8")); + let 
uCount = 0; + for (const [key, record] of Object.entries(usageData)) { + const r = /** @type {any} */ (record); + await client.query( + `INSERT INTO usage (key, count, month_key) + VALUES ($1, $2, $3) ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`, + [key, r.count, r.monthKey] + ); + uCount++; + } + console.log(`✅ Migrated ${uCount} usage records`); + } else { + console.log("⚠️ usage.json not found at", usagePath); + } + + console.log("\n🎉 Migration complete!"); + } finally { + client.release(); + await pool.end(); + } +} + +migrate().catch((err) => { + console.error("Migration failed:", err); + process.exit(1); +}); diff --git a/src/index.ts b/src/index.ts index 0d2d85e..87a0d84 100644 --- a/src/index.ts +++ b/src/index.ts @@ -11,19 +11,17 @@ import { recoverRouter } from "./routes/recover.js"; import { billingRouter } from "./routes/billing.js"; import { emailChangeRouter } from "./routes/email-change.js"; import { authMiddleware } from "./middleware/auth.js"; -import { usageMiddleware } from "./middleware/usage.js"; +import { usageMiddleware, loadUsageData } from "./middleware/usage.js"; import { getUsageStats } from "./middleware/usage.js"; import { pdfRateLimitMiddleware, getConcurrencyStats } from "./middleware/pdfRateLimit.js"; import { initBrowser, closeBrowser } from "./services/browser.js"; import { loadKeys, getAllKeys } from "./services/keys.js"; -import { verifyToken } from "./services/verification.js"; +import { verifyToken, loadVerifications } from "./services/verification.js"; +import { initDatabase } from "./services/db.js"; const app = express(); const PORT = parseInt(process.env.PORT || "3100", 10); -// Load API keys from persistent store -loadKeys(); - app.use(helmet({ crossOriginResourcePolicy: { policy: "cross-origin" } })); // Differentiated CORS middleware @@ -34,10 +32,8 @@ app.use((req, res, next) => { req.path.startsWith('/v1/email-change'); if (isAuthBillingRoute) { - // Auth/billing routes: restrict to docfast.dev 
res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev"); } else { - // Conversion API routes: allow all origins res.setHeader("Access-Control-Allow-Origin", "*"); } @@ -174,6 +170,14 @@ app.get("/api", (_req, res) => { }); async function start() { + // Initialize PostgreSQL + await initDatabase(); + + // Load data from PostgreSQL + await loadKeys(); + await loadVerifications(); + await loadUsageData(); + await initBrowser(); console.log(`Loaded ${getAllKeys().length} API keys`); app.listen(PORT, () => console.log(`DocFast API running on :${PORT}`)); diff --git a/src/middleware/usage.ts b/src/middleware/usage.ts index dce569a..8077970 100644 --- a/src/middleware/usage.ts +++ b/src/middleware/usage.ts @@ -1,48 +1,43 @@ import { isProKey } from "../services/keys.js"; -import fs from "fs/promises"; -import path from "path"; +import pool from "../services/db.js"; -const USAGE_FILE = "/app/data/usage.json"; -let usage = new Map(); const FREE_TIER_LIMIT = 100; +// In-memory cache, periodically synced to PostgreSQL +let usage = new Map(); + function getMonthKey(): string { const d = new Date(); return `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, "0")}`; } -async function loadUsageData(): Promise { +export async function loadUsageData(): Promise { try { - const data = await fs.readFile(USAGE_FILE, "utf8"); - const usageObj = JSON.parse(data); + const result = await pool.query("SELECT key, count, month_key FROM usage"); usage = new Map(); - for (const [key, record] of Object.entries(usageObj)) { - usage.set(key, record as { count: number; monthKey: string }); + for (const row of result.rows) { + usage.set(row.key, { count: row.count, monthKey: row.month_key }); } - console.log(`Loaded usage data for ${usage.size} keys`); + console.log(`Loaded usage data for ${usage.size} keys from PostgreSQL`); } catch (error) { console.log("No existing usage data found, starting fresh"); usage = new Map(); } } -async function saveUsageData(): Promise { +async 
function saveUsageEntry(key: string, record: { count: number; monthKey: string }): Promise { try { - const usageObj: Record = {}; - for (const [key, record] of usage) { - usageObj[key] = record; - } - await fs.mkdir(path.dirname(USAGE_FILE), { recursive: true }); - await fs.writeFile(USAGE_FILE, JSON.stringify(usageObj, null, 2)); + await pool.query( + `INSERT INTO usage (key, count, month_key) VALUES ($1, $2, $3) + ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`, + [key, record.count, record.monthKey] + ); } catch (error) { console.error("Failed to save usage data:", error); } } -loadUsageData().catch(console.error); - export function usageMiddleware(req: any, res: any, next: any): void { - // Use apiKeyInfo attached by auth middleware (works for both Bearer and X-API-Key) const keyInfo = req.apiKeyInfo; const key = keyInfo?.key || "unknown"; const monthKey = getMonthKey(); @@ -71,11 +66,13 @@ export function usageMiddleware(req: any, res: any, next: any): void { function trackUsage(key: string, monthKey: string): void { const record = usage.get(key); if (!record || record.monthKey !== monthKey) { - usage.set(key, { count: 1, monthKey }); + const newRecord = { count: 1, monthKey }; + usage.set(key, newRecord); + saveUsageEntry(key, newRecord).catch(console.error); } else { record.count++; + saveUsageEntry(key, record).catch(console.error); } - saveUsageData().catch(console.error); } export function getUsageStats(): Record { diff --git a/src/routes/billing.ts b/src/routes/billing.ts index d1dfef1..b163d77 100644 --- a/src/routes/billing.ts +++ b/src/routes/billing.ts @@ -56,7 +56,7 @@ router.get("/success", async (req: Request, res: Response) => { return; } - const keyInfo = createProKey(email, customerId); + const keyInfo = await createProKey(email, customerId); // Return a nice HTML page instead of raw JSON res.send(` @@ -91,24 +91,69 @@ router.post("/webhook", async (req: Request, res: Response) => { let event: Stripe.Event; - if (!webhookSecret || 
!sig) { - res.status(400).json({ error: "Missing webhook secret or signature" }); - return; - } - - try { - event = getStripe().webhooks.constructEvent(req.body, sig, webhookSecret); - } catch (err: any) { - console.error("Webhook signature verification failed:", err.message); - res.status(400).json({ error: "Invalid signature" }); + if (!webhookSecret) { + console.warn("⚠️ STRIPE_WEBHOOK_SECRET is not configured — webhook signature verification skipped. Set this in production!"); + // Parse the body as a raw event without verification + try { + event = JSON.parse(typeof req.body === "string" ? req.body : req.body.toString()) as Stripe.Event; + } catch (err: any) { + console.error("Failed to parse webhook body:", err.message); + res.status(400).json({ error: "Invalid payload" }); + return; + } + } else if (!sig) { + res.status(400).json({ error: "Missing stripe-signature header" }); return; + } else { + try { + event = getStripe().webhooks.constructEvent(req.body, sig, webhookSecret); + } catch (err: any) { + console.error("Webhook signature verification failed:", err.message); + res.status(400).json({ error: "Invalid signature" }); + return; + } } switch (event.type) { + case "checkout.session.completed": { + const session = event.data.object as Stripe.Checkout.Session; + const customerId = session.customer as string; + const email = session.customer_details?.email; + + // Filter by product — this Stripe account is shared with other projects + const DOCFAST_PRODUCT_ID = "prod_TygeG8tQPtEAdE"; + try { + const fullSession = await getStripe().checkout.sessions.retrieve(session.id, { + expand: ["line_items"], + }); + const lineItems = fullSession.line_items?.data || []; + const hasDocfastProduct = lineItems.some((item) => { + const price = item.price as Stripe.Price | null; + const productId = typeof price?.product === "string" ? 
price.product : (price?.product as Stripe.Product)?.id; + return productId === DOCFAST_PRODUCT_ID; + }); + if (!hasDocfastProduct) { + console.log(`Ignoring event for different product (session: ${session.id})`); + break; + } + } catch (err: any) { + console.error(`Failed to retrieve session line_items: ${err.message}, skipping`); + break; + } + + if (!customerId || !email) { + console.warn("checkout.session.completed: missing customerId or email, skipping key provisioning"); + break; + } + + const keyInfo = await createProKey(email, customerId); + console.log(`checkout.session.completed: provisioned pro key for ${email} (customer: ${customerId}, key: ${keyInfo.key.slice(0, 12)}...)`); + break; + } case "customer.subscription.deleted": { const sub = event.data.object as Stripe.Subscription; const customerId = sub.customer as string; - revokeByCustomer(customerId); + await revokeByCustomer(customerId); console.log(`Subscription cancelled for ${customerId}, key revoked`); break; } diff --git a/src/routes/email-change.ts b/src/routes/email-change.ts index fd820df..2121450 100644 --- a/src/routes/email-change.ts +++ b/src/routes/email-change.ts @@ -15,7 +15,6 @@ const changeLimiter = rateLimit({ legacyHeaders: false, }); -// Step 1: Request email change — sends verification code to NEW email router.post("/", changeLimiter, async (req: Request, res: Response) => { const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey; const newEmail = req.body?.newEmail; @@ -44,8 +43,7 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => { return; } - const pending = createPendingVerification(cleanEmail); - (pending as any)._changeContext = { apiKey, newEmail: cleanEmail, oldEmail: userKey.email }; + const pending = await createPendingVerification(cleanEmail); sendVerificationEmail(cleanEmail, (pending as any).code).catch((err: Error) => { console.error(`Failed to send email change verification to ${cleanEmail}:`, err); @@ -54,7 
+52,6 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => { res.json({ status: "verification_sent", message: "Verification code sent to your new email address." }); }); -// Step 2: Verify code — updates email router.post("/verify", changeLimiter, async (req: Request, res: Response) => { const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey; const { newEmail, code } = req.body || {}; @@ -74,11 +71,11 @@ router.post("/verify", changeLimiter, async (req: Request, res: Response) => { return; } - const result = verifyCode(cleanEmail, cleanCode); + const result = await verifyCode(cleanEmail, cleanCode); switch (result.status) { case "ok": { - const updated = updateKeyEmail(apiKey, cleanEmail); + const updated = await updateKeyEmail(apiKey, cleanEmail); if (updated) { res.json({ status: "updated", message: "Email address updated successfully.", newEmail: cleanEmail }); } else { diff --git a/src/routes/recover.ts b/src/routes/recover.ts index 8c0d934..b1027b7 100644 --- a/src/routes/recover.ts +++ b/src/routes/recover.ts @@ -14,7 +14,6 @@ const recoverLimiter = rateLimit({ legacyHeaders: false, }); -// Step 1: Request recovery — sends verification code via email router.post("/", recoverLimiter, async (req: Request, res: Response) => { const { email } = req.body || {}; @@ -24,20 +23,16 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => { } const cleanEmail = email.trim().toLowerCase(); - - // Check if this email has any keys const keys = getAllKeys(); const userKey = keys.find(k => k.email === cleanEmail); - // Always return success to prevent email enumeration if (!userKey) { res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." 
}); return; } - const pending = createPendingVerification(cleanEmail); + const pending = await createPendingVerification(cleanEmail); - // Send verification CODE only — NEVER send the API key via email sendVerificationEmail(cleanEmail, pending.code).catch(err => { console.error(`Failed to send recovery email to ${cleanEmail}:`, err); }); @@ -45,7 +40,6 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => { res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." }); }); -// Step 2: Verify code — returns API key in response (NEVER via email) router.post("/verify", recoverLimiter, async (req: Request, res: Response) => { const { email, code } = req.body || {}; @@ -57,7 +51,7 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => { const cleanEmail = email.trim().toLowerCase(); const cleanCode = String(code).trim(); - const result = verifyCode(cleanEmail, cleanCode); + const result = await verifyCode(cleanEmail, cleanCode); switch (result.status) { case "ok": { @@ -65,7 +59,6 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => { const userKey = keys.find(k => k.email === cleanEmail); if (userKey) { - // Return key in response — shown once in browser, never emailed res.json({ status: "recovered", apiKey: userKey.key, diff --git a/src/routes/signup.ts b/src/routes/signup.ts index de4f2e8..da296e6 100644 --- a/src/routes/signup.ts +++ b/src/routes/signup.ts @@ -22,11 +22,11 @@ const verifyLimiter = rateLimit({ legacyHeaders: false, }); -function rejectDuplicateEmail(req: Request, res: Response, next: Function) { +async function rejectDuplicateEmail(req: Request, res: Response, next: Function) { const { email } = req.body || {}; if (email && typeof email === "string") { const cleanEmail = email.trim().toLowerCase(); - if (isEmailVerified(cleanEmail)) { + if (await isEmailVerified(cleanEmail)) { res.status(409).json({ error: "Email 
already registered" }); return; } @@ -45,14 +45,13 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r const cleanEmail = email.trim().toLowerCase(); - if (isEmailVerified(cleanEmail)) { + if (await isEmailVerified(cleanEmail)) { res.status(409).json({ error: "This email is already registered. Contact support if you need help." }); return; } - const pending = createPendingVerification(cleanEmail); + const pending = await createPendingVerification(cleanEmail); - // Send verification code via email (fire-and-forget, don't block response) sendVerificationEmail(cleanEmail, pending.code).catch(err => { console.error(`Failed to send verification email to ${cleanEmail}:`, err); }); @@ -64,7 +63,7 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r }); // Step 2: Verify code — creates API key -router.post("/verify", verifyLimiter, (req: Request, res: Response) => { +router.post("/verify", verifyLimiter, async (req: Request, res: Response) => { const { email, code } = req.body || {}; if (!email || !code) { @@ -75,17 +74,17 @@ router.post("/verify", verifyLimiter, (req: Request, res: Response) => { const cleanEmail = email.trim().toLowerCase(); const cleanCode = String(code).trim(); - if (isEmailVerified(cleanEmail)) { + if (await isEmailVerified(cleanEmail)) { res.status(409).json({ error: "This email is already verified." 
}); return; } - const result = verifyCode(cleanEmail, cleanCode); + const result = await verifyCode(cleanEmail, cleanCode); switch (result.status) { case "ok": { - const keyInfo = createFreeKey(cleanEmail); - const verification = createVerification(cleanEmail, keyInfo.key); + const keyInfo = await createFreeKey(cleanEmail); + const verification = await createVerification(cleanEmail, keyInfo.key); verification.verifiedAt = new Date().toISOString(); res.json({ diff --git a/src/services/db.ts b/src/services/db.ts new file mode 100644 index 0000000..41e9a92 --- /dev/null +++ b/src/services/db.ts @@ -0,0 +1,65 @@ +import pg from "pg"; + +const { Pool } = pg; + +const pool = new Pool({ + host: process.env.DATABASE_HOST || "172.17.0.1", + port: parseInt(process.env.DATABASE_PORT || "5432", 10), + database: process.env.DATABASE_NAME || "docfast", + user: process.env.DATABASE_USER || "docfast", + password: process.env.DATABASE_PASSWORD || "docfast", + max: 10, + idleTimeoutMillis: 30000, +}); + +pool.on("error", (err) => { + console.error("Unexpected PostgreSQL pool error:", err); +}); + +export async function initDatabase(): Promise { + const client = await pool.connect(); + try { + await client.query(` + CREATE TABLE IF NOT EXISTS api_keys ( + key TEXT PRIMARY KEY, + tier TEXT NOT NULL DEFAULT 'free', + email TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + stripe_customer_id TEXT + ); + CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email); + CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id); + + CREATE TABLE IF NOT EXISTS verifications ( + id SERIAL PRIMARY KEY, + email TEXT NOT NULL, + token TEXT NOT NULL UNIQUE, + api_key TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + verified_at TIMESTAMPTZ + ); + CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email); + CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token); + + CREATE TABLE IF NOT EXISTS 
pending_verifications ( + email TEXT PRIMARY KEY, + code TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ NOT NULL, + attempts INT NOT NULL DEFAULT 0 + ); + + CREATE TABLE IF NOT EXISTS usage ( + key TEXT PRIMARY KEY, + count INT NOT NULL DEFAULT 0, + month_key TEXT NOT NULL + ); + `); + console.log("PostgreSQL tables initialized"); + } finally { + client.release(); + } +} + +export { pool }; +export default pool; diff --git a/src/services/keys.ts b/src/services/keys.ts index d5db146..0055d61 100644 --- a/src/services/keys.ts +++ b/src/services/keys.ts @@ -1,11 +1,5 @@ import { randomBytes } from "crypto"; -import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs"; -import path from "path"; -import { fileURLToPath } from "url"; - -const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const DATA_DIR = path.join(__dirname, "../../data"); -const KEYS_FILE = path.join(DATA_DIR, "keys.json"); +import pool from "./db.js"; export interface ApiKey { key: string; @@ -15,47 +9,48 @@ export interface ApiKey { stripeCustomerId?: string; } -interface KeyStore { - keys: ApiKey[]; -} +// In-memory cache for fast lookups, synced with PostgreSQL +let keysCache: ApiKey[] = []; -let store: KeyStore = { keys: [] }; - -function ensureDataDir(): void { - if (!existsSync(DATA_DIR)) { - mkdirSync(DATA_DIR, { recursive: true }); +export async function loadKeys(): Promise { + try { + const result = await pool.query( + "SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys" + ); + keysCache = result.rows.map((r) => ({ + key: r.key, + tier: r.tier as "free" | "pro", + email: r.email, + createdAt: r.created_at instanceof Date ? 
r.created_at.toISOString() : r.created_at, + stripeCustomerId: r.stripe_customer_id || undefined, + })); + } catch (err) { + console.error("Failed to load keys from PostgreSQL:", err); + keysCache = []; } -} -export function loadKeys(): void { - ensureDataDir(); - if (existsSync(KEYS_FILE)) { - try { - store = JSON.parse(readFileSync(KEYS_FILE, "utf-8")); - } catch { - store = { keys: [] }; - } - } // Also load seed keys from env const envKeys = process.env.API_KEYS?.split(",").map((k) => k.trim()).filter(Boolean) || []; for (const k of envKeys) { - if (!store.keys.find((e) => e.key === k)) { - store.keys.push({ key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() }); + if (!keysCache.find((e) => e.key === k)) { + const entry: ApiKey = { key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() }; + keysCache.push(entry); + // Upsert into DB + await pool.query( + `INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4) + ON CONFLICT (key) DO NOTHING`, + [k, "pro", "seed@docfast.dev", new Date().toISOString()] + ).catch(() => {}); } } } -function save(): void { - ensureDataDir(); - writeFileSync(KEYS_FILE, JSON.stringify(store, null, 2)); -} - export function isValidKey(key: string): boolean { - return store.keys.some((k) => k.key === key); + return keysCache.some((k) => k.key === key); } export function getKeyInfo(key: string): ApiKey | undefined { - return store.keys.find((k) => k.key === key); + return keysCache.find((k) => k.key === key); } export function isProKey(key: string): boolean { @@ -67,10 +62,9 @@ function generateKey(prefix: string): string { return `${prefix}_${randomBytes(24).toString("hex")}`; } -export function createFreeKey(email?: string): ApiKey { - // If email provided, check if it already has a free key +export async function createFreeKey(email?: string): Promise { if (email) { - const existing = store.keys.find((k) => k.email === email && k.tier === "free"); + 
const existing = keysCache.find((k) => k.email === email && k.tier === "free"); if (existing) return existing; } @@ -80,16 +74,20 @@ export function createFreeKey(email?: string): ApiKey { email: email || "", createdAt: new Date().toISOString(), }; - store.keys.push(entry); - save(); + + await pool.query( + "INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)", + [entry.key, entry.tier, entry.email, entry.createdAt] + ); + keysCache.push(entry); return entry; } -export function createProKey(email: string, stripeCustomerId: string): ApiKey { - const existing = store.keys.find((k) => k.stripeCustomerId === stripeCustomerId); +export async function createProKey(email: string, stripeCustomerId: string): Promise<ApiKey> { + const existing = keysCache.find((k) => k.stripeCustomerId === stripeCustomerId); if (existing) { existing.tier = "pro"; - save(); + await pool.query("UPDATE api_keys SET tier = 'pro' WHERE key = $1", [existing.key]); return existing; } @@ -100,29 +98,34 @@ export function createProKey(email: string, stripeCustomerId: string): ApiKey { createdAt: new Date().toISOString(), stripeCustomerId, }; - store.keys.push(entry); - save(); + + await pool.query( + "INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id) VALUES ($1, $2, $3, $4, $5)", + [entry.key, entry.tier, entry.email, entry.createdAt, entry.stripeCustomerId] + ); + keysCache.push(entry); return entry; } -export function revokeByCustomer(stripeCustomerId: string): boolean { - const idx = store.keys.findIndex((k) => k.stripeCustomerId === stripeCustomerId); +export async function revokeByCustomer(stripeCustomerId: string): Promise<boolean> { + const idx = keysCache.findIndex((k) => k.stripeCustomerId === stripeCustomerId); if (idx >= 0) { - store.keys.splice(idx, 1); - save(); + const key = keysCache[idx].key; + keysCache.splice(idx, 1); + await pool.query("DELETE FROM api_keys WHERE key = $1", [key]); return true; } return false; } export function getAllKeys(): ApiKey[] { 
- return [...store.keys]; + return [...keysCache]; } -export function updateKeyEmail(apiKey: string, newEmail: string): boolean { - const entry = store.keys.find(k => k.key === apiKey); +export async function updateKeyEmail(apiKey: string, newEmail: string): Promise<boolean> { + const entry = keysCache.find((k) => k.key === apiKey); if (!entry) return false; entry.email = newEmail; - save(); + await pool.query("UPDATE api_keys SET email = $1 WHERE key = $2", [newEmail, apiKey]); return true; } diff --git a/src/services/verification.ts b/src/services/verification.ts index e66cc37..818371c 100644 --- a/src/services/verification.ts +++ b/src/services/verification.ts @@ -1,11 +1,5 @@ import { randomBytes, randomInt } from "crypto"; -import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs"; -import path from "path"; -import { fileURLToPath } from "url"; - -const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const DATA_DIR = path.join(__dirname, "../../data"); -const DB_PATH = path.join(DATA_DIR, "verifications.json"); +import pool from "./db.js"; export interface Verification { email: string; @@ -23,79 +17,68 @@ export interface PendingVerification { attempts: number; } -let verifications: Verification[] = []; -let pendingVerifications: PendingVerification[] = []; - -function ensureDataDir(): void { - if (!existsSync(DATA_DIR)) mkdirSync(DATA_DIR, { recursive: true }); -} - -function load(): void { - ensureDataDir(); - if (existsSync(DB_PATH)) { - try { - const data = JSON.parse(readFileSync(DB_PATH, "utf-8")); - // Support both old format (array) and new format (object) - if (Array.isArray(data)) { - verifications = data; - pendingVerifications = []; - } else { - verifications = data.verifications || []; - pendingVerifications = data.pendingVerifications || []; - } - } catch { - verifications = []; - pendingVerifications = []; - } - } -} - -function save(): void { - ensureDataDir(); - writeFileSync(DB_PATH, JSON.stringify({ verifications, 
pendingVerifications }, null, 2)); -} - -load(); - const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000; -const CODE_EXPIRY_MS = 15 * 60 * 1000; // 15 minutes +const CODE_EXPIRY_MS = 15 * 60 * 1000; const MAX_ATTEMPTS = 3; -// Legacy token-based verification (keep for existing links) -export function createVerification(email: string, apiKey: string): Verification { - const existing = verifications.find(v => v.email === email && !v.verifiedAt); - if (existing) { - const age = Date.now() - new Date(existing.createdAt).getTime(); - if (age < TOKEN_EXPIRY_MS) return existing; - verifications = verifications.filter(v => v !== existing); +export async function createVerification(email: string, apiKey: string): Promise<Verification> { + // Check for existing unexpired, unverified + const existing = await pool.query( + "SELECT * FROM verifications WHERE email = $1 AND verified_at IS NULL AND created_at > NOW() - INTERVAL '24 hours' LIMIT 1", + [email] + ); + if (existing.rows.length > 0) { + const r = existing.rows[0]; + return { email: r.email, token: r.token, apiKey: r.api_key, createdAt: r.created_at.toISOString(), verifiedAt: null }; } - const verification: Verification = { - email, - token: randomBytes(32).toString("hex"), - apiKey, - createdAt: new Date().toISOString(), - verifiedAt: null, - }; - verifications.push(verification); - save(); - return verification; + + // Remove old unverified + await pool.query("DELETE FROM verifications WHERE email = $1 AND verified_at IS NULL", [email]); + + const token = randomBytes(32).toString("hex"); + const now = new Date().toISOString(); + await pool.query( + "INSERT INTO verifications (email, token, api_key, created_at) VALUES ($1, $2, $3, $4)", + [email, token, apiKey, now] + ); + return { email, token, apiKey, createdAt: now, verifiedAt: null }; } export function verifyToken(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } { - const v = 
verifications.find(v => v.token === token); + // Synchronous wrapper — we'll make it async-compatible + // Actually need to keep sync for the GET /verify route. Use sync query workaround or refactor. + // For simplicity, we'll cache verifications in memory too. + return verifyTokenSync(token); +} + +// In-memory cache for verifications (loaded on startup, updated on changes) +let verificationsCache: Verification[] = []; + +export async function loadVerifications(): Promise<void> { + const result = await pool.query("SELECT * FROM verifications"); + verificationsCache = result.rows.map((r) => ({ + email: r.email, + token: r.token, + apiKey: r.api_key, + createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at, + verifiedAt: r.verified_at ? (r.verified_at instanceof Date ? r.verified_at.toISOString() : r.verified_at) : null, + })); +} + +function verifyTokenSync(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } { + const v = verificationsCache.find((v) => v.token === token); if (!v) return { status: "invalid" }; if (v.verifiedAt) return { status: "already_verified", verification: v }; const age = Date.now() - new Date(v.createdAt).getTime(); if (age > TOKEN_EXPIRY_MS) return { status: "expired" }; v.verifiedAt = new Date().toISOString(); - save(); + // Update DB async + pool.query("UPDATE verifications SET verified_at = $1 WHERE token = $2", [v.verifiedAt, token]).catch(console.error); return { status: "ok", verification: v }; } -// New 6-digit code verification -export function createPendingVerification(email: string): PendingVerification { - // Remove any existing pending for this email - pendingVerifications = pendingVerifications.filter(p => p.email !== email); +export async function createPendingVerification(email: string): Promise<PendingVerification> { + await pool.query("DELETE FROM pending_verifications WHERE email = $1", [email]); const now = new Date(); const 
pending: PendingVerification = { @@ -105,47 +88,53 @@ export function createPendingVerification(email: string): PendingVerification { expiresAt: new Date(now.getTime() + CODE_EXPIRY_MS).toISOString(), attempts: 0, }; - pendingVerifications.push(pending); - save(); + + await pool.query( + "INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts) VALUES ($1, $2, $3, $4, $5)", + [pending.email, pending.code, pending.createdAt, pending.expiresAt, pending.attempts] + ); return pending; } -export function verifyCode(email: string, code: string): { status: "ok" | "invalid" | "expired" | "max_attempts" } { +export async function verifyCode(email: string, code: string): Promise<{ status: "ok" | "invalid" | "expired" | "max_attempts" }> { const cleanEmail = email.trim().toLowerCase(); - const pending = pendingVerifications.find(p => p.email === cleanEmail); + const result = await pool.query("SELECT * FROM pending_verifications WHERE email = $1", [cleanEmail]); + const pending = result.rows[0]; if (!pending) return { status: "invalid" }; - if (new Date() > new Date(pending.expiresAt)) { - pendingVerifications = pendingVerifications.filter(p => p !== pending); - save(); + if (new Date() > new Date(pending.expires_at)) { + await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]); return { status: "expired" }; } if (pending.attempts >= MAX_ATTEMPTS) { - pendingVerifications = pendingVerifications.filter(p => p !== pending); - save(); + await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]); return { status: "max_attempts" }; } - pending.attempts++; + await pool.query("UPDATE pending_verifications SET attempts = attempts + 1 WHERE email = $1", [cleanEmail]); if (pending.code !== code) { - save(); return { status: "invalid" }; } - // Success - remove pending - pendingVerifications = pendingVerifications.filter(p => p !== pending); - save(); + await pool.query("DELETE FROM pending_verifications WHERE 
email = $1", [cleanEmail]); return { status: "ok" }; } -export function isEmailVerified(email: string): boolean { - return verifications.some(v => v.email === email && v.verifiedAt !== null); +export async function isEmailVerified(email: string): Promise<boolean> { + const result = await pool.query( + "SELECT 1 FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1", + [email] + ); + return result.rows.length > 0; } -export function getVerifiedApiKey(email: string): string | null { - const v = verifications.find(v => v.email === email && v.verifiedAt !== null); - return v?.apiKey ?? null; +export async function getVerifiedApiKey(email: string): Promise<string | null> { + const result = await pool.query( + "SELECT api_key FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1", + [email] + ); + return result.rows[0]?.api_key ?? null; }