diff --git a/docker-compose.yml b/docker-compose.yml
index 6b3c4c5..a726688 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -17,6 +17,11 @@ services:
- PRO_KEYS=${PRO_KEYS}
- SMTP_HOST=host.docker.internal
- SMTP_PORT=25
+ - DATABASE_HOST=${DATABASE_HOST:-172.17.0.1}
+ - DATABASE_PORT=5432
+ - DATABASE_NAME=docfast
+ - DATABASE_USER=docfast
+ - DATABASE_PASSWORD=${DATABASE_PASSWORD:-docfast}
- POOL_SIZE=15
- BROWSER_COUNT=1
- PAGES_PER_BROWSER=15
diff --git a/package.json b/package.json
index 3cc82fc..de045f5 100644
--- a/package.json
+++ b/package.json
@@ -18,7 +18,8 @@
"nodemailer": "^8.0.1",
"puppeteer": "^24.0.0",
"stripe": "^20.3.1",
- "swagger-ui-dist": "^5.31.0"
+ "swagger-ui-dist": "^5.31.0",
+ "pg": "^8.13.0"
},
"devDependencies": {
"@types/express": "^5.0.0",
@@ -26,7 +27,8 @@
"@types/nodemailer": "^7.0.9",
"tsx": "^4.19.0",
"typescript": "^5.7.0",
- "vitest": "^3.0.0"
+ "vitest": "^3.0.0",
+ "@types/pg": "^8.11.0"
},
"type": "module"
-}
+}
\ No newline at end of file
diff --git a/public/index.html b/public/index.html
index 065cf70..2132592 100644
--- a/public/index.html
+++ b/public/index.html
@@ -256,7 +256,7 @@ html, body {
Avg. generation time
diff --git a/scripts/migrate-to-postgres.mjs b/scripts/migrate-to-postgres.mjs
new file mode 100644
index 0000000..9c6580f
--- /dev/null
+++ b/scripts/migrate-to-postgres.mjs
@@ -0,0 +1,143 @@
+#!/usr/bin/env node
+/**
+ * Migration script: JSON files → PostgreSQL
+ * Run on the server where JSON data files exist.
+ * Usage: DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs
+ */
+import pg from "pg";
+import { readFileSync, existsSync } from "fs";
+
+const { Pool } = pg;
+
+const pool = new Pool({
+ host: process.env.DATABASE_HOST || "127.0.0.1",
+ port: parseInt(process.env.DATABASE_PORT || "5432", 10),
+ database: process.env.DATABASE_NAME || "docfast",
+ user: process.env.DATABASE_USER || "docfast",
+ password: process.env.DATABASE_PASSWORD || "docfast",
+});
+
+async function migrate() {
+ const client = await pool.connect();
+ try {
+ // Create tables
+ await client.query(`
+ CREATE TABLE IF NOT EXISTS api_keys (
+ key TEXT PRIMARY KEY,
+ tier TEXT NOT NULL DEFAULT 'free',
+ email TEXT NOT NULL DEFAULT '',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ stripe_customer_id TEXT
+ );
+ CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
+ CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);
+
+ CREATE TABLE IF NOT EXISTS verifications (
+ id SERIAL PRIMARY KEY,
+ email TEXT NOT NULL,
+ token TEXT NOT NULL UNIQUE,
+ api_key TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ verified_at TIMESTAMPTZ
+ );
+ CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
+ CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);
+
+ CREATE TABLE IF NOT EXISTS pending_verifications (
+ email TEXT PRIMARY KEY,
+ code TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ expires_at TIMESTAMPTZ NOT NULL,
+ attempts INT NOT NULL DEFAULT 0
+ );
+
+ CREATE TABLE IF NOT EXISTS usage (
+ key TEXT PRIMARY KEY,
+ count INT NOT NULL DEFAULT 0,
+ month_key TEXT NOT NULL
+ );
+ `);
+ console.log("✅ Tables created");
+
+ // Migrate keys.json
+ const keysPath = "/opt/docfast/data/keys.json";
+ if (existsSync(keysPath)) {
+ const keysData = JSON.parse(readFileSync(keysPath, "utf-8"));
+ const keys = keysData.keys || [];
+ let keyCount = 0;
+ for (const k of keys) {
+ await client.query(
+ `INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id)
+ VALUES ($1, $2, $3, $4, $5) ON CONFLICT (key) DO NOTHING`,
+ [k.key, k.tier, k.email || "", k.createdAt, k.stripeCustomerId || null]
+ );
+ keyCount++;
+ }
+ console.log(`✅ Migrated ${keyCount} API keys`);
+ } else {
+ // Try docker volume path
+ console.log("⚠️ keys.json not found at", keysPath);
+ }
+
+ // Migrate verifications.json
+ const verifPath = "/opt/docfast/data/verifications.json";
+ if (existsSync(verifPath)) {
+ const data = JSON.parse(readFileSync(verifPath, "utf-8"));
+ const verifications = Array.isArray(data) ? data : (data.verifications || []);
+ const pending = data.pendingVerifications || [];
+
+ let vCount = 0;
+ for (const v of verifications) {
+ await client.query(
+ `INSERT INTO verifications (email, token, api_key, created_at, verified_at)
+ VALUES ($1, $2, $3, $4, $5) ON CONFLICT (token) DO NOTHING`,
+ [v.email, v.token, v.apiKey, v.createdAt, v.verifiedAt || null]
+ );
+ vCount++;
+ }
+ console.log(`✅ Migrated ${vCount} verifications`);
+
+ let pCount = 0;
+ for (const p of pending) {
+ await client.query(
+ `INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts)
+ VALUES ($1, $2, $3, $4, $5) ON CONFLICT (email) DO NOTHING`,
+ [p.email, p.code, p.createdAt, p.expiresAt, p.attempts]
+ );
+ pCount++;
+ }
+ console.log(`✅ Migrated ${pCount} pending verifications`);
+ } else {
+ console.log("⚠️ verifications.json not found at", verifPath);
+ }
+
+ // Migrate usage.json
+ const usagePath = "/opt/docfast/data/usage.json";
+ if (existsSync(usagePath)) {
+ const usageData = JSON.parse(readFileSync(usagePath, "utf-8"));
+ let uCount = 0;
+ for (const [key, record] of Object.entries(usageData)) {
+ const r = record; // .mjs is plain JavaScript — a TypeScript "as" cast is a SyntaxError here
+ await client.query(
+ `INSERT INTO usage (key, count, month_key)
+ VALUES ($1, $2, $3) ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
+ [key, r.count, r.monthKey]
+ );
+ uCount++;
+ }
+ console.log(`✅ Migrated ${uCount} usage records`);
+ } else {
+ console.log("⚠️ usage.json not found at", usagePath);
+ }
+
+ console.log("\n🎉 Migration complete!");
+ } finally {
+ client.release();
+ await pool.end();
+ }
+}
+
+migrate().catch((err) => {
+ console.error("Migration failed:", err);
+ process.exit(1);
+});
diff --git a/src/index.ts b/src/index.ts
index 0d2d85e..cc94be3 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -11,19 +11,17 @@ import { recoverRouter } from "./routes/recover.js";
import { billingRouter } from "./routes/billing.js";
import { emailChangeRouter } from "./routes/email-change.js";
import { authMiddleware } from "./middleware/auth.js";
-import { usageMiddleware } from "./middleware/usage.js";
+import { usageMiddleware, loadUsageData } from "./middleware/usage.js";
import { getUsageStats } from "./middleware/usage.js";
import { pdfRateLimitMiddleware, getConcurrencyStats } from "./middleware/pdfRateLimit.js";
import { initBrowser, closeBrowser } from "./services/browser.js";
import { loadKeys, getAllKeys } from "./services/keys.js";
-import { verifyToken } from "./services/verification.js";
+import { verifyToken, loadVerifications } from "./services/verification.js";
+import { initDatabase } from "./services/db.js";
const app = express();
const PORT = parseInt(process.env.PORT || "3100", 10);
-// Load API keys from persistent store
-loadKeys();
-
app.use(helmet({ crossOriginResourcePolicy: { policy: "cross-origin" } }));
// Differentiated CORS middleware
@@ -34,10 +32,8 @@ app.use((req, res, next) => {
req.path.startsWith('/v1/email-change');
if (isAuthBillingRoute) {
- // Auth/billing routes: restrict to docfast.dev
res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev");
} else {
- // Conversion API routes: allow all origins
res.setHeader("Access-Control-Allow-Origin", "*");
}
@@ -60,7 +56,7 @@ app.use(express.text({ limit: "2mb", type: "text/*" }));
// Trust nginx proxy
app.set("trust proxy", 1);
-// Global rate limiting - reduced from 10,000 to reasonable limit
+// Global rate limiting
const limiter = rateLimit({
windowMs: 60_000,
max: 100,
@@ -174,6 +170,14 @@ app.get("/api", (_req, res) => {
});
async function start() {
+ // Initialize PostgreSQL
+ await initDatabase();
+
+ // Load data from PostgreSQL
+ await loadKeys();
+ await loadVerifications();
+ await loadUsageData();
+
await initBrowser();
console.log(`Loaded ${getAllKeys().length} API keys`);
app.listen(PORT, () => console.log(`DocFast API running on :${PORT}`));
diff --git a/src/middleware/usage.ts b/src/middleware/usage.ts
index dce569a..8077970 100644
--- a/src/middleware/usage.ts
+++ b/src/middleware/usage.ts
@@ -1,48 +1,43 @@
import { isProKey } from "../services/keys.js";
-import fs from "fs/promises";
-import path from "path";
+import pool from "../services/db.js";
-const USAGE_FILE = "/app/data/usage.json";
-let usage = new Map<string, { count: number; monthKey: string }>();
const FREE_TIER_LIMIT = 100;
+// In-memory cache, periodically synced to PostgreSQL
+let usage = new Map<string, { count: number; monthKey: string }>();
+
function getMonthKey(): string {
const d = new Date();
return `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, "0")}`;
}
-async function loadUsageData(): Promise<void> {
+export async function loadUsageData(): Promise<void> {
try {
- const data = await fs.readFile(USAGE_FILE, "utf8");
- const usageObj = JSON.parse(data);
+ const result = await pool.query("SELECT key, count, month_key FROM usage");
usage = new Map();
- for (const [key, record] of Object.entries(usageObj)) {
- usage.set(key, record as { count: number; monthKey: string });
+ for (const row of result.rows) {
+ usage.set(row.key, { count: row.count, monthKey: row.month_key });
}
- console.log(`Loaded usage data for ${usage.size} keys`);
+ console.log(`Loaded usage data for ${usage.size} keys from PostgreSQL`);
} catch (error) {
console.log("No existing usage data found, starting fresh");
usage = new Map();
}
}
-async function saveUsageData(): Promise<void> {
+async function saveUsageEntry(key: string, record: { count: number; monthKey: string }): Promise<void> {
try {
- const usageObj: Record<string, { count: number; monthKey: string }> = {};
- for (const [key, record] of usage) {
- usageObj[key] = record;
- }
- await fs.mkdir(path.dirname(USAGE_FILE), { recursive: true });
- await fs.writeFile(USAGE_FILE, JSON.stringify(usageObj, null, 2));
+ await pool.query(
+ `INSERT INTO usage (key, count, month_key) VALUES ($1, $2, $3)
+ ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
+ [key, record.count, record.monthKey]
+ );
} catch (error) {
console.error("Failed to save usage data:", error);
}
}
-loadUsageData().catch(console.error);
-
export function usageMiddleware(req: any, res: any, next: any): void {
- // Use apiKeyInfo attached by auth middleware (works for both Bearer and X-API-Key)
const keyInfo = req.apiKeyInfo;
const key = keyInfo?.key || "unknown";
const monthKey = getMonthKey();
@@ -71,11 +66,13 @@ export function usageMiddleware(req: any, res: any, next: any): void {
function trackUsage(key: string, monthKey: string): void {
const record = usage.get(key);
if (!record || record.monthKey !== monthKey) {
- usage.set(key, { count: 1, monthKey });
+ const newRecord = { count: 1, monthKey };
+ usage.set(key, newRecord);
+ saveUsageEntry(key, newRecord).catch(console.error);
} else {
record.count++;
+ saveUsageEntry(key, record).catch(console.error);
}
- saveUsageData().catch(console.error);
}
export function getUsageStats(): Record {
diff --git a/src/routes/billing.ts b/src/routes/billing.ts
index d3bbbac..3618528 100644
--- a/src/routes/billing.ts
+++ b/src/routes/billing.ts
@@ -56,7 +56,7 @@ router.get("/success", async (req: Request, res: Response) => {
return;
}
- const keyInfo = createProKey(email, customerId);
+ const keyInfo = await createProKey(email, customerId);
// Return a nice HTML page instead of raw JSON
res.send(`
@@ -125,14 +125,14 @@ router.post("/webhook", async (req: Request, res: Response) => {
break;
}
- const keyInfo = createProKey(email, customerId);
+ const keyInfo = await createProKey(email, customerId);
console.log(`checkout.session.completed: provisioned pro key for ${email} (customer: ${customerId}, key: ${keyInfo.key.slice(0, 12)}...)`);
break;
}
case "customer.subscription.deleted": {
const sub = event.data.object as Stripe.Subscription;
const customerId = sub.customer as string;
- revokeByCustomer(customerId);
+ await revokeByCustomer(customerId);
console.log(`Subscription cancelled for ${customerId}, key revoked`);
break;
}
diff --git a/src/routes/email-change.ts b/src/routes/email-change.ts
index fd820df..2121450 100644
--- a/src/routes/email-change.ts
+++ b/src/routes/email-change.ts
@@ -15,7 +15,6 @@ const changeLimiter = rateLimit({
legacyHeaders: false,
});
-// Step 1: Request email change — sends verification code to NEW email
router.post("/", changeLimiter, async (req: Request, res: Response) => {
const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey;
const newEmail = req.body?.newEmail;
@@ -44,8 +43,7 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => {
return;
}
- const pending = createPendingVerification(cleanEmail);
- (pending as any)._changeContext = { apiKey, newEmail: cleanEmail, oldEmail: userKey.email };
+ const pending = await createPendingVerification(cleanEmail);
sendVerificationEmail(cleanEmail, (pending as any).code).catch((err: Error) => {
console.error(`Failed to send email change verification to ${cleanEmail}:`, err);
@@ -54,7 +52,6 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => {
res.json({ status: "verification_sent", message: "Verification code sent to your new email address." });
});
-// Step 2: Verify code — updates email
router.post("/verify", changeLimiter, async (req: Request, res: Response) => {
const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey;
const { newEmail, code } = req.body || {};
@@ -74,11 +71,11 @@ router.post("/verify", changeLimiter, async (req: Request, res: Response) => {
return;
}
- const result = verifyCode(cleanEmail, cleanCode);
+ const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) {
case "ok": {
- const updated = updateKeyEmail(apiKey, cleanEmail);
+ const updated = await updateKeyEmail(apiKey, cleanEmail);
if (updated) {
res.json({ status: "updated", message: "Email address updated successfully.", newEmail: cleanEmail });
} else {
diff --git a/src/routes/recover.ts b/src/routes/recover.ts
index 8c0d934..b1027b7 100644
--- a/src/routes/recover.ts
+++ b/src/routes/recover.ts
@@ -14,7 +14,6 @@ const recoverLimiter = rateLimit({
legacyHeaders: false,
});
-// Step 1: Request recovery — sends verification code via email
router.post("/", recoverLimiter, async (req: Request, res: Response) => {
const { email } = req.body || {};
@@ -24,20 +23,16 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => {
}
const cleanEmail = email.trim().toLowerCase();
-
- // Check if this email has any keys
const keys = getAllKeys();
const userKey = keys.find(k => k.email === cleanEmail);
- // Always return success to prevent email enumeration
if (!userKey) {
res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." });
return;
}
- const pending = createPendingVerification(cleanEmail);
+ const pending = await createPendingVerification(cleanEmail);
- // Send verification CODE only — NEVER send the API key via email
sendVerificationEmail(cleanEmail, pending.code).catch(err => {
console.error(`Failed to send recovery email to ${cleanEmail}:`, err);
});
@@ -45,7 +40,6 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => {
res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." });
});
-// Step 2: Verify code — returns API key in response (NEVER via email)
router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const { email, code } = req.body || {};
@@ -57,7 +51,7 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const cleanEmail = email.trim().toLowerCase();
const cleanCode = String(code).trim();
- const result = verifyCode(cleanEmail, cleanCode);
+ const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) {
case "ok": {
@@ -65,7 +59,6 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const userKey = keys.find(k => k.email === cleanEmail);
if (userKey) {
- // Return key in response — shown once in browser, never emailed
res.json({
status: "recovered",
apiKey: userKey.key,
diff --git a/src/routes/signup.ts b/src/routes/signup.ts
index de4f2e8..da296e6 100644
--- a/src/routes/signup.ts
+++ b/src/routes/signup.ts
@@ -22,11 +22,11 @@ const verifyLimiter = rateLimit({
legacyHeaders: false,
});
-function rejectDuplicateEmail(req: Request, res: Response, next: Function) {
+async function rejectDuplicateEmail(req: Request, res: Response, next: Function) {
const { email } = req.body || {};
if (email && typeof email === "string") {
const cleanEmail = email.trim().toLowerCase();
- if (isEmailVerified(cleanEmail)) {
+ if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "Email already registered" });
return;
}
@@ -45,14 +45,13 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r
const cleanEmail = email.trim().toLowerCase();
- if (isEmailVerified(cleanEmail)) {
+ if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "This email is already registered. Contact support if you need help." });
return;
}
- const pending = createPendingVerification(cleanEmail);
+ const pending = await createPendingVerification(cleanEmail);
- // Send verification code via email (fire-and-forget, don't block response)
sendVerificationEmail(cleanEmail, pending.code).catch(err => {
console.error(`Failed to send verification email to ${cleanEmail}:`, err);
});
@@ -64,7 +63,7 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r
});
// Step 2: Verify code — creates API key
-router.post("/verify", verifyLimiter, (req: Request, res: Response) => {
+router.post("/verify", verifyLimiter, async (req: Request, res: Response) => {
const { email, code } = req.body || {};
if (!email || !code) {
@@ -75,17 +74,17 @@ router.post("/verify", verifyLimiter, (req: Request, res: Response) => {
const cleanEmail = email.trim().toLowerCase();
const cleanCode = String(code).trim();
- if (isEmailVerified(cleanEmail)) {
+ if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "This email is already verified." });
return;
}
- const result = verifyCode(cleanEmail, cleanCode);
+ const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) {
case "ok": {
- const keyInfo = createFreeKey(cleanEmail);
- const verification = createVerification(cleanEmail, keyInfo.key);
+ const keyInfo = await createFreeKey(cleanEmail);
+ const verification = await createVerification(cleanEmail, keyInfo.key);
verification.verifiedAt = new Date().toISOString();
res.json({
diff --git a/src/services/db.ts b/src/services/db.ts
new file mode 100644
index 0000000..41e9a92
--- /dev/null
+++ b/src/services/db.ts
@@ -0,0 +1,65 @@
+import pg from "pg";
+
+const { Pool } = pg;
+
+const pool = new Pool({
+ host: process.env.DATABASE_HOST || "172.17.0.1",
+ port: parseInt(process.env.DATABASE_PORT || "5432", 10),
+ database: process.env.DATABASE_NAME || "docfast",
+ user: process.env.DATABASE_USER || "docfast",
+ password: process.env.DATABASE_PASSWORD || "docfast",
+ max: 10,
+ idleTimeoutMillis: 30000,
+});
+
+pool.on("error", (err) => {
+ console.error("Unexpected PostgreSQL pool error:", err);
+});
+
+export async function initDatabase(): Promise<void> {
+ const client = await pool.connect();
+ try {
+ await client.query(`
+ CREATE TABLE IF NOT EXISTS api_keys (
+ key TEXT PRIMARY KEY,
+ tier TEXT NOT NULL DEFAULT 'free',
+ email TEXT NOT NULL DEFAULT '',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ stripe_customer_id TEXT
+ );
+ CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
+ CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);
+
+ CREATE TABLE IF NOT EXISTS verifications (
+ id SERIAL PRIMARY KEY,
+ email TEXT NOT NULL,
+ token TEXT NOT NULL UNIQUE,
+ api_key TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ verified_at TIMESTAMPTZ
+ );
+ CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
+ CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);
+
+ CREATE TABLE IF NOT EXISTS pending_verifications (
+ email TEXT PRIMARY KEY,
+ code TEXT NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ expires_at TIMESTAMPTZ NOT NULL,
+ attempts INT NOT NULL DEFAULT 0
+ );
+
+ CREATE TABLE IF NOT EXISTS usage (
+ key TEXT PRIMARY KEY,
+ count INT NOT NULL DEFAULT 0,
+ month_key TEXT NOT NULL
+ );
+ `);
+ console.log("PostgreSQL tables initialized");
+ } finally {
+ client.release();
+ }
+}
+
+export { pool };
+export default pool;
diff --git a/src/services/keys.ts b/src/services/keys.ts
index d5db146..0055d61 100644
--- a/src/services/keys.ts
+++ b/src/services/keys.ts
@@ -1,11 +1,5 @@
import { randomBytes } from "crypto";
-import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
-import path from "path";
-import { fileURLToPath } from "url";
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
-const DATA_DIR = path.join(__dirname, "../../data");
-const KEYS_FILE = path.join(DATA_DIR, "keys.json");
+import pool from "./db.js";
export interface ApiKey {
key: string;
@@ -15,47 +9,48 @@ export interface ApiKey {
stripeCustomerId?: string;
}
-interface KeyStore {
- keys: ApiKey[];
-}
+// In-memory cache for fast lookups, synced with PostgreSQL
+let keysCache: ApiKey[] = [];
-let store: KeyStore = { keys: [] };
-
-function ensureDataDir(): void {
- if (!existsSync(DATA_DIR)) {
- mkdirSync(DATA_DIR, { recursive: true });
+export async function loadKeys(): Promise<void> {
+ try {
+ const result = await pool.query(
+ "SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys"
+ );
+ keysCache = result.rows.map((r) => ({
+ key: r.key,
+ tier: r.tier as "free" | "pro",
+ email: r.email,
+ createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
+ stripeCustomerId: r.stripe_customer_id || undefined,
+ }));
+ } catch (err) {
+ console.error("Failed to load keys from PostgreSQL:", err);
+ keysCache = [];
}
-}
-export function loadKeys(): void {
- ensureDataDir();
- if (existsSync(KEYS_FILE)) {
- try {
- store = JSON.parse(readFileSync(KEYS_FILE, "utf-8"));
- } catch {
- store = { keys: [] };
- }
- }
// Also load seed keys from env
const envKeys = process.env.API_KEYS?.split(",").map((k) => k.trim()).filter(Boolean) || [];
for (const k of envKeys) {
- if (!store.keys.find((e) => e.key === k)) {
- store.keys.push({ key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() });
+ if (!keysCache.find((e) => e.key === k)) {
+ const entry: ApiKey = { key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() };
+ keysCache.push(entry);
+ // Upsert into DB
+ await pool.query(
+ `INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)
+ ON CONFLICT (key) DO NOTHING`,
+ [k, "pro", "seed@docfast.dev", new Date().toISOString()]
+ ).catch(() => {});
}
}
}
-function save(): void {
- ensureDataDir();
- writeFileSync(KEYS_FILE, JSON.stringify(store, null, 2));
-}
-
export function isValidKey(key: string): boolean {
- return store.keys.some((k) => k.key === key);
+ return keysCache.some((k) => k.key === key);
}
export function getKeyInfo(key: string): ApiKey | undefined {
- return store.keys.find((k) => k.key === key);
+ return keysCache.find((k) => k.key === key);
}
export function isProKey(key: string): boolean {
@@ -67,10 +62,9 @@ function generateKey(prefix: string): string {
return `${prefix}_${randomBytes(24).toString("hex")}`;
}
-export function createFreeKey(email?: string): ApiKey {
- // If email provided, check if it already has a free key
+export async function createFreeKey(email?: string): Promise<ApiKey> {
if (email) {
- const existing = store.keys.find((k) => k.email === email && k.tier === "free");
+ const existing = keysCache.find((k) => k.email === email && k.tier === "free");
if (existing) return existing;
}
@@ -80,16 +74,20 @@ export function createFreeKey(email?: string): ApiKey {
email: email || "",
createdAt: new Date().toISOString(),
};
- store.keys.push(entry);
- save();
+
+ await pool.query(
+ "INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)",
+ [entry.key, entry.tier, entry.email, entry.createdAt]
+ );
+ keysCache.push(entry);
return entry;
}
-export function createProKey(email: string, stripeCustomerId: string): ApiKey {
- const existing = store.keys.find((k) => k.stripeCustomerId === stripeCustomerId);
+export async function createProKey(email: string, stripeCustomerId: string): Promise<ApiKey> {
+ const existing = keysCache.find((k) => k.stripeCustomerId === stripeCustomerId);
if (existing) {
existing.tier = "pro";
- save();
+ await pool.query("UPDATE api_keys SET tier = 'pro' WHERE key = $1", [existing.key]);
return existing;
}
@@ -100,29 +98,34 @@ export function createProKey(email: string, stripeCustomerId: string): ApiKey {
createdAt: new Date().toISOString(),
stripeCustomerId,
};
- store.keys.push(entry);
- save();
+
+ await pool.query(
+ "INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id) VALUES ($1, $2, $3, $4, $5)",
+ [entry.key, entry.tier, entry.email, entry.createdAt, entry.stripeCustomerId]
+ );
+ keysCache.push(entry);
return entry;
}
-export function revokeByCustomer(stripeCustomerId: string): boolean {
- const idx = store.keys.findIndex((k) => k.stripeCustomerId === stripeCustomerId);
+export async function revokeByCustomer(stripeCustomerId: string): Promise<boolean> {
+ const idx = keysCache.findIndex((k) => k.stripeCustomerId === stripeCustomerId);
if (idx >= 0) {
- store.keys.splice(idx, 1);
- save();
+ const key = keysCache[idx].key;
+ keysCache.splice(idx, 1);
+ await pool.query("DELETE FROM api_keys WHERE key = $1", [key]);
return true;
}
return false;
}
export function getAllKeys(): ApiKey[] {
- return [...store.keys];
+ return [...keysCache];
}
-export function updateKeyEmail(apiKey: string, newEmail: string): boolean {
- const entry = store.keys.find(k => k.key === apiKey);
+export async function updateKeyEmail(apiKey: string, newEmail: string): Promise<boolean> {
+ const entry = keysCache.find((k) => k.key === apiKey);
if (!entry) return false;
entry.email = newEmail;
- save();
+ await pool.query("UPDATE api_keys SET email = $1 WHERE key = $2", [newEmail, apiKey]);
return true;
}
diff --git a/src/services/verification.ts b/src/services/verification.ts
index e66cc37..818371c 100644
--- a/src/services/verification.ts
+++ b/src/services/verification.ts
@@ -1,11 +1,5 @@
import { randomBytes, randomInt } from "crypto";
-import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
-import path from "path";
-import { fileURLToPath } from "url";
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
-const DATA_DIR = path.join(__dirname, "../../data");
-const DB_PATH = path.join(DATA_DIR, "verifications.json");
+import pool from "./db.js";
export interface Verification {
email: string;
@@ -23,79 +17,68 @@ export interface PendingVerification {
attempts: number;
}
-let verifications: Verification[] = [];
-let pendingVerifications: PendingVerification[] = [];
-
-function ensureDataDir(): void {
- if (!existsSync(DATA_DIR)) mkdirSync(DATA_DIR, { recursive: true });
-}
-
-function load(): void {
- ensureDataDir();
- if (existsSync(DB_PATH)) {
- try {
- const data = JSON.parse(readFileSync(DB_PATH, "utf-8"));
- // Support both old format (array) and new format (object)
- if (Array.isArray(data)) {
- verifications = data;
- pendingVerifications = [];
- } else {
- verifications = data.verifications || [];
- pendingVerifications = data.pendingVerifications || [];
- }
- } catch {
- verifications = [];
- pendingVerifications = [];
- }
- }
-}
-
-function save(): void {
- ensureDataDir();
- writeFileSync(DB_PATH, JSON.stringify({ verifications, pendingVerifications }, null, 2));
-}
-
-load();
-
const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000;
-const CODE_EXPIRY_MS = 15 * 60 * 1000; // 15 minutes
+const CODE_EXPIRY_MS = 15 * 60 * 1000;
const MAX_ATTEMPTS = 3;
-// Legacy token-based verification (keep for existing links)
-export function createVerification(email: string, apiKey: string): Verification {
- const existing = verifications.find(v => v.email === email && !v.verifiedAt);
- if (existing) {
- const age = Date.now() - new Date(existing.createdAt).getTime();
- if (age < TOKEN_EXPIRY_MS) return existing;
- verifications = verifications.filter(v => v !== existing);
+export async function createVerification(email: string, apiKey: string): Promise<Verification> {
+ // Check for existing unexpired, unverified
+ const existing = await pool.query(
+ "SELECT * FROM verifications WHERE email = $1 AND verified_at IS NULL AND created_at > NOW() - INTERVAL '24 hours' LIMIT 1",
+ [email]
+ );
+ if (existing.rows.length > 0) {
+ const r = existing.rows[0];
+ return { email: r.email, token: r.token, apiKey: r.api_key, createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at, verifiedAt: null };
}
- const verification: Verification = {
- email,
- token: randomBytes(32).toString("hex"),
- apiKey,
- createdAt: new Date().toISOString(),
- verifiedAt: null,
- };
- verifications.push(verification);
- save();
- return verification;
+
+ // Remove old unverified
+ await pool.query("DELETE FROM verifications WHERE email = $1 AND verified_at IS NULL", [email]);
+
+ const token = randomBytes(32).toString("hex");
+ const now = new Date().toISOString();
+ await pool.query(
+ "INSERT INTO verifications (email, token, api_key, created_at) VALUES ($1, $2, $3, $4)",
+ [email, token, apiKey, now]
+ );
+ return { email, token, apiKey, createdAt: now, verifiedAt: null };
}
export function verifyToken(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } {
- const v = verifications.find(v => v.token === token);
+ // Kept synchronous because the existing GET /verify route calls it without awaiting.
+ // Reads from the in-memory verificationsCache (populated at startup by loadVerifications)
+ // and persists verified_at updates to PostgreSQL asynchronously in verifyTokenSync.
+ return verifyTokenSync(token);
+}
+
+// In-memory cache for verifications (loaded on startup, updated on changes)
+let verificationsCache: Verification[] = [];
+
+export async function loadVerifications(): Promise<void> {
+ const result = await pool.query("SELECT * FROM verifications");
+ verificationsCache = result.rows.map((r) => ({
+ email: r.email,
+ token: r.token,
+ apiKey: r.api_key,
+ createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
+ verifiedAt: r.verified_at ? (r.verified_at instanceof Date ? r.verified_at.toISOString() : r.verified_at) : null,
+ }));
+}
+
+function verifyTokenSync(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } {
+ const v = verificationsCache.find((v) => v.token === token);
if (!v) return { status: "invalid" };
if (v.verifiedAt) return { status: "already_verified", verification: v };
const age = Date.now() - new Date(v.createdAt).getTime();
if (age > TOKEN_EXPIRY_MS) return { status: "expired" };
v.verifiedAt = new Date().toISOString();
- save();
+ // Update DB async
+ pool.query("UPDATE verifications SET verified_at = $1 WHERE token = $2", [v.verifiedAt, token]).catch(console.error);
return { status: "ok", verification: v };
}
-// New 6-digit code verification
-export function createPendingVerification(email: string): PendingVerification {
- // Remove any existing pending for this email
- pendingVerifications = pendingVerifications.filter(p => p.email !== email);
+export async function createPendingVerification(email: string): Promise<PendingVerification> {
+ await pool.query("DELETE FROM pending_verifications WHERE email = $1", [email]);
const now = new Date();
const pending: PendingVerification = {
@@ -105,47 +88,53 @@ export function createPendingVerification(email: string): PendingVerification {
expiresAt: new Date(now.getTime() + CODE_EXPIRY_MS).toISOString(),
attempts: 0,
};
- pendingVerifications.push(pending);
- save();
+
+ await pool.query(
+ "INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts) VALUES ($1, $2, $3, $4, $5)",
+ [pending.email, pending.code, pending.createdAt, pending.expiresAt, pending.attempts]
+ );
return pending;
}
-export function verifyCode(email: string, code: string): { status: "ok" | "invalid" | "expired" | "max_attempts" } {
+export async function verifyCode(email: string, code: string): Promise<{ status: "ok" | "invalid" | "expired" | "max_attempts" }> {
const cleanEmail = email.trim().toLowerCase();
- const pending = pendingVerifications.find(p => p.email === cleanEmail);
+ const result = await pool.query("SELECT * FROM pending_verifications WHERE email = $1", [cleanEmail]);
+ const pending = result.rows[0];
if (!pending) return { status: "invalid" };
- if (new Date() > new Date(pending.expiresAt)) {
- pendingVerifications = pendingVerifications.filter(p => p !== pending);
- save();
+ if (new Date() > new Date(pending.expires_at)) {
+ await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
return { status: "expired" };
}
if (pending.attempts >= MAX_ATTEMPTS) {
- pendingVerifications = pendingVerifications.filter(p => p !== pending);
- save();
+ await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
return { status: "max_attempts" };
}
- pending.attempts++;
+ await pool.query("UPDATE pending_verifications SET attempts = attempts + 1 WHERE email = $1", [cleanEmail]);
if (pending.code !== code) {
- save();
return { status: "invalid" };
}
- // Success - remove pending
- pendingVerifications = pendingVerifications.filter(p => p !== pending);
- save();
+ await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
return { status: "ok" };
}
-export function isEmailVerified(email: string): boolean {
- return verifications.some(v => v.email === email && v.verifiedAt !== null);
+export async function isEmailVerified(email: string): Promise<boolean> {
+ const result = await pool.query(
+ "SELECT 1 FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1",
+ [email]
+ );
+ return result.rows.length > 0;
}
-export function getVerifiedApiKey(email: string): string | null {
- const v = verifications.find(v => v.email === email && v.verifiedAt !== null);
- return v?.apiKey ?? null;
+export async function getVerifiedApiKey(email: string): Promise<string | null> {
+ const result = await pool.query(
+ "SELECT api_key FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1",
+ [email]
+ );
+ return result.rows[0]?.api_key ?? null;
}