Migrate from JSON to PostgreSQL, update SLA to 99.5%

- Replace JSON file storage with PostgreSQL (pg package)
- Add db.ts service for connection pool and schema init
- Rewrite keys.ts, verification.ts, usage.ts for async PostgreSQL
- Update all routes for async function signatures
- Add migration script (scripts/migrate-to-postgres.mjs)
- Update docker-compose.yml with DATABASE_* env vars
- Change SLA from 99.9% to 99.5% in landing page
This commit is contained in:
DocFast Bot 2026-02-15 10:18:25 +00:00
parent bb1881af61
commit e9d16bf2a3
13 changed files with 395 additions and 198 deletions

View file

@ -17,6 +17,11 @@ services:
- PRO_KEYS=${PRO_KEYS} - PRO_KEYS=${PRO_KEYS}
- SMTP_HOST=host.docker.internal - SMTP_HOST=host.docker.internal
- SMTP_PORT=25 - SMTP_PORT=25
- DATABASE_HOST=172.17.0.1
- DATABASE_PORT=5432
- DATABASE_NAME=docfast
- DATABASE_USER=docfast
- DATABASE_PASSWORD=${DATABASE_PASSWORD:-docfast}
- POOL_SIZE=15 - POOL_SIZE=15
- BROWSER_COUNT=1 - BROWSER_COUNT=1
- PAGES_PER_BROWSER=15 - PAGES_PER_BROWSER=15

View file

@ -18,7 +18,8 @@
"nodemailer": "^8.0.1", "nodemailer": "^8.0.1",
"puppeteer": "^24.0.0", "puppeteer": "^24.0.0",
"stripe": "^20.3.1", "stripe": "^20.3.1",
"swagger-ui-dist": "^5.31.0" "swagger-ui-dist": "^5.31.0",
"pg": "^8.13.0"
}, },
"devDependencies": { "devDependencies": {
"@types/express": "^5.0.0", "@types/express": "^5.0.0",
@ -26,7 +27,8 @@
"@types/nodemailer": "^7.0.9", "@types/nodemailer": "^7.0.9",
"tsx": "^4.19.0", "tsx": "^4.19.0",
"typescript": "^5.7.0", "typescript": "^5.7.0",
"vitest": "^3.0.0" "vitest": "^3.0.0",
"@types/pg": "^8.11.0"
}, },
"type": "module" "type": "module"
} }

View file

@ -256,7 +256,7 @@ html, body {
<div class="trust-label">Avg. generation time</div> <div class="trust-label">Avg. generation time</div>
</div> </div>
<div class="trust-item"> <div class="trust-item">
<div class="trust-num">99.9%</div> <div class="trust-num">99.5%</div>
<div class="trust-label">Uptime SLA</div> <div class="trust-label">Uptime SLA</div>
</div> </div>
<div class="trust-item"> <div class="trust-item">

View file

@ -0,0 +1,143 @@
#!/usr/bin/env node
/**
 * Migration script: JSON files -> PostgreSQL.
 * Run on the server where the JSON data files exist.
 * Usage: DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs
 *
 * Idempotent: re-running skips rows that already exist (ON CONFLICT DO
 * NOTHING), except usage counters, which are overwritten with the latest
 * JSON values so the newest counts win.
 */
import pg from "pg";
import { readFileSync, existsSync } from "fs";

const { Pool } = pg;

// Connection settings mirror src/services/db.ts; every value can be
// overridden through the DATABASE_* environment variables.
const pool = new Pool({
  host: process.env.DATABASE_HOST || "127.0.0.1",
  port: parseInt(process.env.DATABASE_PORT || "5432", 10),
  database: process.env.DATABASE_NAME || "docfast",
  user: process.env.DATABASE_USER || "docfast",
  password: process.env.DATABASE_PASSWORD || "docfast",
});

async function migrate() {
  const client = await pool.connect();
  try {
    // Create tables and indexes (same schema as initDatabase in
    // src/services/db.ts; IF NOT EXISTS makes this safe to re-run).
    await client.query(`
      CREATE TABLE IF NOT EXISTS api_keys (
        key TEXT PRIMARY KEY,
        tier TEXT NOT NULL DEFAULT 'free',
        email TEXT NOT NULL DEFAULT '',
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        stripe_customer_id TEXT
      );
      CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
      CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);
      CREATE TABLE IF NOT EXISTS verifications (
        id SERIAL PRIMARY KEY,
        email TEXT NOT NULL,
        token TEXT NOT NULL UNIQUE,
        api_key TEXT NOT NULL,
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        verified_at TIMESTAMPTZ
      );
      CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
      CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);
      CREATE TABLE IF NOT EXISTS pending_verifications (
        email TEXT PRIMARY KEY,
        code TEXT NOT NULL,
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        expires_at TIMESTAMPTZ NOT NULL,
        attempts INT NOT NULL DEFAULT 0
      );
      CREATE TABLE IF NOT EXISTS usage (
        key TEXT PRIMARY KEY,
        count INT NOT NULL DEFAULT 0,
        month_key TEXT NOT NULL
      );
    `);
    console.log("✅ Tables created");

    // Migrate keys.json -> api_keys
    const keysPath = "/opt/docfast/data/keys.json";
    if (existsSync(keysPath)) {
      const keysData = JSON.parse(readFileSync(keysPath, "utf-8"));
      const keys = keysData.keys || [];
      let keyCount = 0;
      for (const k of keys) {
        await client.query(
          `INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (key) DO NOTHING`,
          [k.key, k.tier, k.email || "", k.createdAt, k.stripeCustomerId || null]
        );
        keyCount++;
      }
      console.log(`✅ Migrated ${keyCount} API keys`);
    } else {
      console.log("⚠️ keys.json not found at", keysPath);
    }

    // Migrate verifications.json -> verifications + pending_verifications.
    // Supports both the legacy format (bare array) and the newer
    // { verifications, pendingVerifications } object format.
    const verifPath = "/opt/docfast/data/verifications.json";
    if (existsSync(verifPath)) {
      const data = JSON.parse(readFileSync(verifPath, "utf-8"));
      const verifications = Array.isArray(data) ? data : (data.verifications || []);
      const pending = data.pendingVerifications || [];
      let vCount = 0;
      for (const v of verifications) {
        await client.query(
          `INSERT INTO verifications (email, token, api_key, created_at, verified_at)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (token) DO NOTHING`,
          [v.email, v.token, v.apiKey, v.createdAt, v.verifiedAt || null]
        );
        vCount++;
      }
      console.log(`✅ Migrated ${vCount} verifications`);
      let pCount = 0;
      for (const p of pending) {
        await client.query(
          `INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (email) DO NOTHING`,
          [p.email, p.code, p.createdAt, p.expiresAt, p.attempts]
        );
        pCount++;
      }
      console.log(`✅ Migrated ${pCount} pending verifications`);
    } else {
      console.log("⚠️ verifications.json not found at", verifPath);
    }

    // Migrate usage.json -> usage (upsert: JSON values overwrite DB rows)
    const usagePath = "/opt/docfast/data/usage.json";
    if (existsSync(usagePath)) {
      const usageData = JSON.parse(readFileSync(usagePath, "utf-8"));
      let uCount = 0;
      for (const [key, record] of Object.entries(usageData)) {
        // BUG FIX: the original read `const r = record as any;` — `as` is
        // TypeScript-only syntax and is a SyntaxError in a plain .mjs file,
        // so the script could never run under node.
        const r = record;
        await client.query(
          `INSERT INTO usage (key, count, month_key)
           VALUES ($1, $2, $3) ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
          [key, r.count, r.monthKey]
        );
        uCount++;
      }
      console.log(`✅ Migrated ${uCount} usage records`);
    } else {
      console.log("⚠️ usage.json not found at", usagePath);
    }

    console.log("\n🎉 Migration complete!");
  } finally {
    // Always return the client and drain the pool so the process exits.
    client.release();
    await pool.end();
  }
}

migrate().catch((err) => {
  console.error("Migration failed:", err);
  process.exit(1);
});

View file

@ -11,19 +11,17 @@ import { recoverRouter } from "./routes/recover.js";
import { billingRouter } from "./routes/billing.js"; import { billingRouter } from "./routes/billing.js";
import { emailChangeRouter } from "./routes/email-change.js"; import { emailChangeRouter } from "./routes/email-change.js";
import { authMiddleware } from "./middleware/auth.js"; import { authMiddleware } from "./middleware/auth.js";
import { usageMiddleware } from "./middleware/usage.js"; import { usageMiddleware, loadUsageData } from "./middleware/usage.js";
import { getUsageStats } from "./middleware/usage.js"; import { getUsageStats } from "./middleware/usage.js";
import { pdfRateLimitMiddleware, getConcurrencyStats } from "./middleware/pdfRateLimit.js"; import { pdfRateLimitMiddleware, getConcurrencyStats } from "./middleware/pdfRateLimit.js";
import { initBrowser, closeBrowser } from "./services/browser.js"; import { initBrowser, closeBrowser } from "./services/browser.js";
import { loadKeys, getAllKeys } from "./services/keys.js"; import { loadKeys, getAllKeys } from "./services/keys.js";
import { verifyToken } from "./services/verification.js"; import { verifyToken, loadVerifications } from "./services/verification.js";
import { initDatabase } from "./services/db.js";
const app = express(); const app = express();
const PORT = parseInt(process.env.PORT || "3100", 10); const PORT = parseInt(process.env.PORT || "3100", 10);
// Load API keys from persistent store
loadKeys();
app.use(helmet({ crossOriginResourcePolicy: { policy: "cross-origin" } })); app.use(helmet({ crossOriginResourcePolicy: { policy: "cross-origin" } }));
// Differentiated CORS middleware // Differentiated CORS middleware
@ -34,10 +32,8 @@ app.use((req, res, next) => {
req.path.startsWith('/v1/email-change'); req.path.startsWith('/v1/email-change');
if (isAuthBillingRoute) { if (isAuthBillingRoute) {
// Auth/billing routes: restrict to docfast.dev
res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev"); res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev");
} else { } else {
// Conversion API routes: allow all origins
res.setHeader("Access-Control-Allow-Origin", "*"); res.setHeader("Access-Control-Allow-Origin", "*");
} }
@ -60,7 +56,7 @@ app.use(express.text({ limit: "2mb", type: "text/*" }));
// Trust nginx proxy // Trust nginx proxy
app.set("trust proxy", 1); app.set("trust proxy", 1);
// Global rate limiting - reduced from 10,000 to reasonable limit // Global rate limiting
const limiter = rateLimit({ const limiter = rateLimit({
windowMs: 60_000, windowMs: 60_000,
max: 100, max: 100,
@ -174,6 +170,14 @@ app.get("/api", (_req, res) => {
}); });
async function start() { async function start() {
// Initialize PostgreSQL
await initDatabase();
// Load data from PostgreSQL
await loadKeys();
await loadVerifications();
await loadUsageData();
await initBrowser(); await initBrowser();
console.log(`Loaded ${getAllKeys().length} API keys`); console.log(`Loaded ${getAllKeys().length} API keys`);
app.listen(PORT, () => console.log(`DocFast API running on :${PORT}`)); app.listen(PORT, () => console.log(`DocFast API running on :${PORT}`));

View file

@ -1,48 +1,43 @@
import { isProKey } from "../services/keys.js"; import { isProKey } from "../services/keys.js";
import fs from "fs/promises"; import pool from "../services/db.js";
import path from "path";
const USAGE_FILE = "/app/data/usage.json";
let usage = new Map<string, { count: number; monthKey: string }>();
const FREE_TIER_LIMIT = 100; const FREE_TIER_LIMIT = 100;
// In-memory cache, periodically synced to PostgreSQL
let usage = new Map<string, { count: number; monthKey: string }>();
function getMonthKey(): string { function getMonthKey(): string {
const d = new Date(); const d = new Date();
return `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, "0")}`; return `${d.getFullYear()}-${String(d.getMonth() + 1).padStart(2, "0")}`;
} }
async function loadUsageData(): Promise<void> { export async function loadUsageData(): Promise<void> {
try { try {
const data = await fs.readFile(USAGE_FILE, "utf8"); const result = await pool.query("SELECT key, count, month_key FROM usage");
const usageObj = JSON.parse(data);
usage = new Map(); usage = new Map();
for (const [key, record] of Object.entries(usageObj)) { for (const row of result.rows) {
usage.set(key, record as { count: number; monthKey: string }); usage.set(row.key, { count: row.count, monthKey: row.month_key });
} }
console.log(`Loaded usage data for ${usage.size} keys`); console.log(`Loaded usage data for ${usage.size} keys from PostgreSQL`);
} catch (error) { } catch (error) {
console.log("No existing usage data found, starting fresh"); console.log("No existing usage data found, starting fresh");
usage = new Map(); usage = new Map();
} }
} }
async function saveUsageData(): Promise<void> { async function saveUsageEntry(key: string, record: { count: number; monthKey: string }): Promise<void> {
try { try {
const usageObj: Record<string, { count: number; monthKey: string }> = {}; await pool.query(
for (const [key, record] of usage) { `INSERT INTO usage (key, count, month_key) VALUES ($1, $2, $3)
usageObj[key] = record; ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
} [key, record.count, record.monthKey]
await fs.mkdir(path.dirname(USAGE_FILE), { recursive: true }); );
await fs.writeFile(USAGE_FILE, JSON.stringify(usageObj, null, 2));
} catch (error) { } catch (error) {
console.error("Failed to save usage data:", error); console.error("Failed to save usage data:", error);
} }
} }
loadUsageData().catch(console.error);
export function usageMiddleware(req: any, res: any, next: any): void { export function usageMiddleware(req: any, res: any, next: any): void {
// Use apiKeyInfo attached by auth middleware (works for both Bearer and X-API-Key)
const keyInfo = req.apiKeyInfo; const keyInfo = req.apiKeyInfo;
const key = keyInfo?.key || "unknown"; const key = keyInfo?.key || "unknown";
const monthKey = getMonthKey(); const monthKey = getMonthKey();
@ -71,11 +66,13 @@ export function usageMiddleware(req: any, res: any, next: any): void {
function trackUsage(key: string, monthKey: string): void { function trackUsage(key: string, monthKey: string): void {
const record = usage.get(key); const record = usage.get(key);
if (!record || record.monthKey !== monthKey) { if (!record || record.monthKey !== monthKey) {
usage.set(key, { count: 1, monthKey }); const newRecord = { count: 1, monthKey };
usage.set(key, newRecord);
saveUsageEntry(key, newRecord).catch(console.error);
} else { } else {
record.count++; record.count++;
saveUsageEntry(key, record).catch(console.error);
} }
saveUsageData().catch(console.error);
} }
export function getUsageStats(): Record<string, { count: number; month: string }> { export function getUsageStats(): Record<string, { count: number; month: string }> {

View file

@ -56,7 +56,7 @@ router.get("/success", async (req: Request, res: Response) => {
return; return;
} }
const keyInfo = createProKey(email, customerId); const keyInfo = await createProKey(email, customerId);
// Return a nice HTML page instead of raw JSON // Return a nice HTML page instead of raw JSON
res.send(`<!DOCTYPE html> res.send(`<!DOCTYPE html>
@ -125,14 +125,14 @@ router.post("/webhook", async (req: Request, res: Response) => {
break; break;
} }
const keyInfo = createProKey(email, customerId); const keyInfo = await createProKey(email, customerId);
console.log(`checkout.session.completed: provisioned pro key for ${email} (customer: ${customerId}, key: ${keyInfo.key.slice(0, 12)}...)`); console.log(`checkout.session.completed: provisioned pro key for ${email} (customer: ${customerId}, key: ${keyInfo.key.slice(0, 12)}...)`);
break; break;
} }
case "customer.subscription.deleted": { case "customer.subscription.deleted": {
const sub = event.data.object as Stripe.Subscription; const sub = event.data.object as Stripe.Subscription;
const customerId = sub.customer as string; const customerId = sub.customer as string;
revokeByCustomer(customerId); await revokeByCustomer(customerId);
console.log(`Subscription cancelled for ${customerId}, key revoked`); console.log(`Subscription cancelled for ${customerId}, key revoked`);
break; break;
} }

View file

@ -15,7 +15,6 @@ const changeLimiter = rateLimit({
legacyHeaders: false, legacyHeaders: false,
}); });
// Step 1: Request email change — sends verification code to NEW email
router.post("/", changeLimiter, async (req: Request, res: Response) => { router.post("/", changeLimiter, async (req: Request, res: Response) => {
const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey; const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey;
const newEmail = req.body?.newEmail; const newEmail = req.body?.newEmail;
@ -44,8 +43,7 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => {
return; return;
} }
const pending = createPendingVerification(cleanEmail); const pending = await createPendingVerification(cleanEmail);
(pending as any)._changeContext = { apiKey, newEmail: cleanEmail, oldEmail: userKey.email };
sendVerificationEmail(cleanEmail, (pending as any).code).catch((err: Error) => { sendVerificationEmail(cleanEmail, (pending as any).code).catch((err: Error) => {
console.error(`Failed to send email change verification to ${cleanEmail}:`, err); console.error(`Failed to send email change verification to ${cleanEmail}:`, err);
@ -54,7 +52,6 @@ router.post("/", changeLimiter, async (req: Request, res: Response) => {
res.json({ status: "verification_sent", message: "Verification code sent to your new email address." }); res.json({ status: "verification_sent", message: "Verification code sent to your new email address." });
}); });
// Step 2: Verify code — updates email
router.post("/verify", changeLimiter, async (req: Request, res: Response) => { router.post("/verify", changeLimiter, async (req: Request, res: Response) => {
const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey; const apiKey = req.headers.authorization?.replace(/^Bearer\s+/i, "") || req.body?.apiKey;
const { newEmail, code } = req.body || {}; const { newEmail, code } = req.body || {};
@ -74,11 +71,11 @@ router.post("/verify", changeLimiter, async (req: Request, res: Response) => {
return; return;
} }
const result = verifyCode(cleanEmail, cleanCode); const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) { switch (result.status) {
case "ok": { case "ok": {
const updated = updateKeyEmail(apiKey, cleanEmail); const updated = await updateKeyEmail(apiKey, cleanEmail);
if (updated) { if (updated) {
res.json({ status: "updated", message: "Email address updated successfully.", newEmail: cleanEmail }); res.json({ status: "updated", message: "Email address updated successfully.", newEmail: cleanEmail });
} else { } else {

View file

@ -14,7 +14,6 @@ const recoverLimiter = rateLimit({
legacyHeaders: false, legacyHeaders: false,
}); });
// Step 1: Request recovery — sends verification code via email
router.post("/", recoverLimiter, async (req: Request, res: Response) => { router.post("/", recoverLimiter, async (req: Request, res: Response) => {
const { email } = req.body || {}; const { email } = req.body || {};
@ -24,20 +23,16 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => {
} }
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
// Check if this email has any keys
const keys = getAllKeys(); const keys = getAllKeys();
const userKey = keys.find(k => k.email === cleanEmail); const userKey = keys.find(k => k.email === cleanEmail);
// Always return success to prevent email enumeration
if (!userKey) { if (!userKey) {
res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." }); res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." });
return; return;
} }
const pending = createPendingVerification(cleanEmail); const pending = await createPendingVerification(cleanEmail);
// Send verification CODE only — NEVER send the API key via email
sendVerificationEmail(cleanEmail, pending.code).catch(err => { sendVerificationEmail(cleanEmail, pending.code).catch(err => {
console.error(`Failed to send recovery email to ${cleanEmail}:`, err); console.error(`Failed to send recovery email to ${cleanEmail}:`, err);
}); });
@ -45,7 +40,6 @@ router.post("/", recoverLimiter, async (req: Request, res: Response) => {
res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." }); res.json({ status: "recovery_sent", message: "If an account exists for this email, a verification code has been sent." });
}); });
// Step 2: Verify code — returns API key in response (NEVER via email)
router.post("/verify", recoverLimiter, async (req: Request, res: Response) => { router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const { email, code } = req.body || {}; const { email, code } = req.body || {};
@ -57,7 +51,7 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
const cleanCode = String(code).trim(); const cleanCode = String(code).trim();
const result = verifyCode(cleanEmail, cleanCode); const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) { switch (result.status) {
case "ok": { case "ok": {
@ -65,7 +59,6 @@ router.post("/verify", recoverLimiter, async (req: Request, res: Response) => {
const userKey = keys.find(k => k.email === cleanEmail); const userKey = keys.find(k => k.email === cleanEmail);
if (userKey) { if (userKey) {
// Return key in response — shown once in browser, never emailed
res.json({ res.json({
status: "recovered", status: "recovered",
apiKey: userKey.key, apiKey: userKey.key,

View file

@ -22,11 +22,11 @@ const verifyLimiter = rateLimit({
legacyHeaders: false, legacyHeaders: false,
}); });
function rejectDuplicateEmail(req: Request, res: Response, next: Function) { async function rejectDuplicateEmail(req: Request, res: Response, next: Function) {
const { email } = req.body || {}; const { email } = req.body || {};
if (email && typeof email === "string") { if (email && typeof email === "string") {
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
if (isEmailVerified(cleanEmail)) { if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "Email already registered" }); res.status(409).json({ error: "Email already registered" });
return; return;
} }
@ -45,14 +45,13 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
if (isEmailVerified(cleanEmail)) { if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "This email is already registered. Contact support if you need help." }); res.status(409).json({ error: "This email is already registered. Contact support if you need help." });
return; return;
} }
const pending = createPendingVerification(cleanEmail); const pending = await createPendingVerification(cleanEmail);
// Send verification code via email (fire-and-forget, don't block response)
sendVerificationEmail(cleanEmail, pending.code).catch(err => { sendVerificationEmail(cleanEmail, pending.code).catch(err => {
console.error(`Failed to send verification email to ${cleanEmail}:`, err); console.error(`Failed to send verification email to ${cleanEmail}:`, err);
}); });
@ -64,7 +63,7 @@ router.post("/free", rejectDuplicateEmail, signupLimiter, async (req: Request, r
}); });
// Step 2: Verify code — creates API key // Step 2: Verify code — creates API key
router.post("/verify", verifyLimiter, (req: Request, res: Response) => { router.post("/verify", verifyLimiter, async (req: Request, res: Response) => {
const { email, code } = req.body || {}; const { email, code } = req.body || {};
if (!email || !code) { if (!email || !code) {
@ -75,17 +74,17 @@ router.post("/verify", verifyLimiter, (req: Request, res: Response) => {
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
const cleanCode = String(code).trim(); const cleanCode = String(code).trim();
if (isEmailVerified(cleanEmail)) { if (await isEmailVerified(cleanEmail)) {
res.status(409).json({ error: "This email is already verified." }); res.status(409).json({ error: "This email is already verified." });
return; return;
} }
const result = verifyCode(cleanEmail, cleanCode); const result = await verifyCode(cleanEmail, cleanCode);
switch (result.status) { switch (result.status) {
case "ok": { case "ok": {
const keyInfo = createFreeKey(cleanEmail); const keyInfo = await createFreeKey(cleanEmail);
const verification = createVerification(cleanEmail, keyInfo.key); const verification = await createVerification(cleanEmail, keyInfo.key);
verification.verifiedAt = new Date().toISOString(); verification.verifiedAt = new Date().toISOString();
res.json({ res.json({

65
src/services/db.ts Normal file
View file

@ -0,0 +1,65 @@
import pg from "pg";

const { Pool } = pg;

// Pool configuration from DATABASE_* env vars. The host default,
// 172.17.0.1, is the docker default bridge gateway, so a container
// reaches PostgreSQL running on the host.
const poolConfig = {
  host: process.env.DATABASE_HOST || "172.17.0.1",
  port: parseInt(process.env.DATABASE_PORT || "5432", 10),
  database: process.env.DATABASE_NAME || "docfast",
  user: process.env.DATABASE_USER || "docfast",
  password: process.env.DATABASE_PASSWORD || "docfast",
  max: 10,
  idleTimeoutMillis: 30000,
};

/** Shared PostgreSQL connection pool used by all service modules. */
const pool = new Pool(poolConfig);

// Errors on idle clients are emitted on the pool; without a listener
// they would crash the process, so log them instead.
pool.on("error", (err) => {
  console.error("Unexpected PostgreSQL pool error:", err);
});

// Schema DDL — every statement is IF NOT EXISTS, so running this on an
// already-initialized database is a no-op.
const SCHEMA_SQL = `
  CREATE TABLE IF NOT EXISTS api_keys (
    key TEXT PRIMARY KEY,
    tier TEXT NOT NULL DEFAULT 'free',
    email TEXT NOT NULL DEFAULT '',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    stripe_customer_id TEXT
  );
  CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
  CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);
  CREATE TABLE IF NOT EXISTS verifications (
    id SERIAL PRIMARY KEY,
    email TEXT NOT NULL,
    token TEXT NOT NULL UNIQUE,
    api_key TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    verified_at TIMESTAMPTZ
  );
  CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
  CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);
  CREATE TABLE IF NOT EXISTS pending_verifications (
    email TEXT PRIMARY KEY,
    code TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    expires_at TIMESTAMPTZ NOT NULL,
    attempts INT NOT NULL DEFAULT 0
  );
  CREATE TABLE IF NOT EXISTS usage (
    key TEXT PRIMARY KEY,
    count INT NOT NULL DEFAULT 0,
    month_key TEXT NOT NULL
  );
`;

/**
 * Creates all tables and indexes if they do not exist yet.
 * Must be awaited once at startup before any service queries the pool.
 */
export async function initDatabase(): Promise<void> {
  const conn = await pool.connect();
  try {
    await conn.query(SCHEMA_SQL);
    console.log("PostgreSQL tables initialized");
  } finally {
    conn.release();
  }
}

export { pool };
export default pool;

View file

@ -1,11 +1,5 @@
import { randomBytes } from "crypto"; import { randomBytes } from "crypto";
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs"; import pool from "./db.js";
import path from "path";
import { fileURLToPath } from "url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const DATA_DIR = path.join(__dirname, "../../data");
const KEYS_FILE = path.join(DATA_DIR, "keys.json");
export interface ApiKey { export interface ApiKey {
key: string; key: string;
@ -15,47 +9,48 @@ export interface ApiKey {
stripeCustomerId?: string; stripeCustomerId?: string;
} }
interface KeyStore { // In-memory cache for fast lookups, synced with PostgreSQL
keys: ApiKey[]; let keysCache: ApiKey[] = [];
}
let store: KeyStore = { keys: [] }; export async function loadKeys(): Promise<void> {
function ensureDataDir(): void {
if (!existsSync(DATA_DIR)) {
mkdirSync(DATA_DIR, { recursive: true });
}
}
export function loadKeys(): void {
ensureDataDir();
if (existsSync(KEYS_FILE)) {
try { try {
store = JSON.parse(readFileSync(KEYS_FILE, "utf-8")); const result = await pool.query(
} catch { "SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys"
store = { keys: [] }; );
} keysCache = result.rows.map((r) => ({
key: r.key,
tier: r.tier as "free" | "pro",
email: r.email,
createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
stripeCustomerId: r.stripe_customer_id || undefined,
}));
} catch (err) {
console.error("Failed to load keys from PostgreSQL:", err);
keysCache = [];
} }
// Also load seed keys from env // Also load seed keys from env
const envKeys = process.env.API_KEYS?.split(",").map((k) => k.trim()).filter(Boolean) || []; const envKeys = process.env.API_KEYS?.split(",").map((k) => k.trim()).filter(Boolean) || [];
for (const k of envKeys) { for (const k of envKeys) {
if (!store.keys.find((e) => e.key === k)) { if (!keysCache.find((e) => e.key === k)) {
store.keys.push({ key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() }); const entry: ApiKey = { key: k, tier: "pro", email: "seed@docfast.dev", createdAt: new Date().toISOString() };
keysCache.push(entry);
// Upsert into DB
await pool.query(
`INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)
ON CONFLICT (key) DO NOTHING`,
[k, "pro", "seed@docfast.dev", new Date().toISOString()]
).catch(() => {});
} }
} }
} }
function save(): void {
ensureDataDir();
writeFileSync(KEYS_FILE, JSON.stringify(store, null, 2));
}
export function isValidKey(key: string): boolean { export function isValidKey(key: string): boolean {
return store.keys.some((k) => k.key === key); return keysCache.some((k) => k.key === key);
} }
export function getKeyInfo(key: string): ApiKey | undefined { export function getKeyInfo(key: string): ApiKey | undefined {
return store.keys.find((k) => k.key === key); return keysCache.find((k) => k.key === key);
} }
export function isProKey(key: string): boolean { export function isProKey(key: string): boolean {
@ -67,10 +62,9 @@ function generateKey(prefix: string): string {
return `${prefix}_${randomBytes(24).toString("hex")}`; return `${prefix}_${randomBytes(24).toString("hex")}`;
} }
export function createFreeKey(email?: string): ApiKey { export async function createFreeKey(email?: string): Promise<ApiKey> {
// If email provided, check if it already has a free key
if (email) { if (email) {
const existing = store.keys.find((k) => k.email === email && k.tier === "free"); const existing = keysCache.find((k) => k.email === email && k.tier === "free");
if (existing) return existing; if (existing) return existing;
} }
@ -80,16 +74,20 @@ export function createFreeKey(email?: string): ApiKey {
email: email || "", email: email || "",
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
}; };
store.keys.push(entry);
save(); await pool.query(
"INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)",
[entry.key, entry.tier, entry.email, entry.createdAt]
);
keysCache.push(entry);
return entry; return entry;
} }
export function createProKey(email: string, stripeCustomerId: string): ApiKey { export async function createProKey(email: string, stripeCustomerId: string): Promise<ApiKey> {
const existing = store.keys.find((k) => k.stripeCustomerId === stripeCustomerId); const existing = keysCache.find((k) => k.stripeCustomerId === stripeCustomerId);
if (existing) { if (existing) {
existing.tier = "pro"; existing.tier = "pro";
save(); await pool.query("UPDATE api_keys SET tier = 'pro' WHERE key = $1", [existing.key]);
return existing; return existing;
} }
@ -100,29 +98,34 @@ export function createProKey(email: string, stripeCustomerId: string): ApiKey {
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
stripeCustomerId, stripeCustomerId,
}; };
store.keys.push(entry);
save(); await pool.query(
"INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id) VALUES ($1, $2, $3, $4, $5)",
[entry.key, entry.tier, entry.email, entry.createdAt, entry.stripeCustomerId]
);
keysCache.push(entry);
return entry; return entry;
} }
export function revokeByCustomer(stripeCustomerId: string): boolean { export async function revokeByCustomer(stripeCustomerId: string): Promise<boolean> {
const idx = store.keys.findIndex((k) => k.stripeCustomerId === stripeCustomerId); const idx = keysCache.findIndex((k) => k.stripeCustomerId === stripeCustomerId);
if (idx >= 0) { if (idx >= 0) {
store.keys.splice(idx, 1); const key = keysCache[idx].key;
save(); keysCache.splice(idx, 1);
await pool.query("DELETE FROM api_keys WHERE key = $1", [key]);
return true; return true;
} }
return false; return false;
} }
export function getAllKeys(): ApiKey[] { export function getAllKeys(): ApiKey[] {
return [...store.keys]; return [...keysCache];
} }
export function updateKeyEmail(apiKey: string, newEmail: string): boolean { export async function updateKeyEmail(apiKey: string, newEmail: string): Promise<boolean> {
const entry = store.keys.find(k => k.key === apiKey); const entry = keysCache.find((k) => k.key === apiKey);
if (!entry) return false; if (!entry) return false;
entry.email = newEmail; entry.email = newEmail;
save(); await pool.query("UPDATE api_keys SET email = $1 WHERE key = $2", [newEmail, apiKey]);
return true; return true;
} }

View file

@ -1,11 +1,5 @@
import { randomBytes, randomInt } from "crypto"; import { randomBytes, randomInt } from "crypto";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs"; import pool from "./db.js";
import path from "path";
import { fileURLToPath } from "url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const DATA_DIR = path.join(__dirname, "../../data");
const DB_PATH = path.join(DATA_DIR, "verifications.json");
export interface Verification { export interface Verification {
email: string; email: string;
@ -23,79 +17,68 @@ export interface PendingVerification {
attempts: number; attempts: number;
} }
let verifications: Verification[] = [];
let pendingVerifications: PendingVerification[] = [];
function ensureDataDir(): void {
if (!existsSync(DATA_DIR)) mkdirSync(DATA_DIR, { recursive: true });
}
function load(): void {
ensureDataDir();
if (existsSync(DB_PATH)) {
try {
const data = JSON.parse(readFileSync(DB_PATH, "utf-8"));
// Support both old format (array) and new format (object)
if (Array.isArray(data)) {
verifications = data;
pendingVerifications = [];
} else {
verifications = data.verifications || [];
pendingVerifications = data.pendingVerifications || [];
}
} catch {
verifications = [];
pendingVerifications = [];
}
}
}
function save(): void {
ensureDataDir();
writeFileSync(DB_PATH, JSON.stringify({ verifications, pendingVerifications }, null, 2));
}
load();
const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000; const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000;
const CODE_EXPIRY_MS = 15 * 60 * 1000; // 15 minutes const CODE_EXPIRY_MS = 15 * 60 * 1000;
const MAX_ATTEMPTS = 3; const MAX_ATTEMPTS = 3;
// Legacy token-based verification (keep for existing links) export async function createVerification(email: string, apiKey: string): Promise<Verification> {
export function createVerification(email: string, apiKey: string): Verification { // Check for existing unexpired, unverified
const existing = verifications.find(v => v.email === email && !v.verifiedAt); const existing = await pool.query(
if (existing) { "SELECT * FROM verifications WHERE email = $1 AND verified_at IS NULL AND created_at > NOW() - INTERVAL '24 hours' LIMIT 1",
const age = Date.now() - new Date(existing.createdAt).getTime(); [email]
if (age < TOKEN_EXPIRY_MS) return existing; );
verifications = verifications.filter(v => v !== existing); if (existing.rows.length > 0) {
const r = existing.rows[0];
return { email: r.email, token: r.token, apiKey: r.api_key, createdAt: r.created_at.toISOString(), verifiedAt: null };
} }
const verification: Verification = {
email, // Remove old unverified
token: randomBytes(32).toString("hex"), await pool.query("DELETE FROM verifications WHERE email = $1 AND verified_at IS NULL", [email]);
apiKey,
createdAt: new Date().toISOString(), const token = randomBytes(32).toString("hex");
verifiedAt: null, const now = new Date().toISOString();
}; await pool.query(
verifications.push(verification); "INSERT INTO verifications (email, token, api_key, created_at) VALUES ($1, $2, $3, $4)",
save(); [email, token, apiKey, now]
return verification; );
return { email, token, apiKey, createdAt: now, verifiedAt: null };
} }
export function verifyToken(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } { export function verifyToken(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } {
const v = verifications.find(v => v.token === token); // Synchronous wrapper — we'll make it async-compatible
// Actually need to keep sync for the GET /verify route. Use sync query workaround or refactor.
// For simplicity, we'll cache verifications in memory too.
return verifyTokenSync(token);
}
// In-memory cache for verifications (loaded on startup, updated on changes)
let verificationsCache: Verification[] = [];
export async function loadVerifications(): Promise<void> {
const result = await pool.query("SELECT * FROM verifications");
verificationsCache = result.rows.map((r) => ({
email: r.email,
token: r.token,
apiKey: r.api_key,
createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
verifiedAt: r.verified_at ? (r.verified_at instanceof Date ? r.verified_at.toISOString() : r.verified_at) : null,
}));
}
function verifyTokenSync(token: string): { status: "ok"; verification: Verification } | { status: "invalid" | "expired" | "already_verified"; verification?: Verification } {
const v = verificationsCache.find((v) => v.token === token);
if (!v) return { status: "invalid" }; if (!v) return { status: "invalid" };
if (v.verifiedAt) return { status: "already_verified", verification: v }; if (v.verifiedAt) return { status: "already_verified", verification: v };
const age = Date.now() - new Date(v.createdAt).getTime(); const age = Date.now() - new Date(v.createdAt).getTime();
if (age > TOKEN_EXPIRY_MS) return { status: "expired" }; if (age > TOKEN_EXPIRY_MS) return { status: "expired" };
v.verifiedAt = new Date().toISOString(); v.verifiedAt = new Date().toISOString();
save(); // Update DB async
pool.query("UPDATE verifications SET verified_at = $1 WHERE token = $2", [v.verifiedAt, token]).catch(console.error);
return { status: "ok", verification: v }; return { status: "ok", verification: v };
} }
// New 6-digit code verification export async function createPendingVerification(email: string): Promise<PendingVerification> {
export function createPendingVerification(email: string): PendingVerification { await pool.query("DELETE FROM pending_verifications WHERE email = $1", [email]);
// Remove any existing pending for this email
pendingVerifications = pendingVerifications.filter(p => p.email !== email);
const now = new Date(); const now = new Date();
const pending: PendingVerification = { const pending: PendingVerification = {
@ -105,47 +88,53 @@ export function createPendingVerification(email: string): PendingVerification {
expiresAt: new Date(now.getTime() + CODE_EXPIRY_MS).toISOString(), expiresAt: new Date(now.getTime() + CODE_EXPIRY_MS).toISOString(),
attempts: 0, attempts: 0,
}; };
pendingVerifications.push(pending);
save(); await pool.query(
"INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts) VALUES ($1, $2, $3, $4, $5)",
[pending.email, pending.code, pending.createdAt, pending.expiresAt, pending.attempts]
);
return pending; return pending;
} }
export function verifyCode(email: string, code: string): { status: "ok" | "invalid" | "expired" | "max_attempts" } { export async function verifyCode(email: string, code: string): Promise<{ status: "ok" | "invalid" | "expired" | "max_attempts" }> {
const cleanEmail = email.trim().toLowerCase(); const cleanEmail = email.trim().toLowerCase();
const pending = pendingVerifications.find(p => p.email === cleanEmail); const result = await pool.query("SELECT * FROM pending_verifications WHERE email = $1", [cleanEmail]);
const pending = result.rows[0];
if (!pending) return { status: "invalid" }; if (!pending) return { status: "invalid" };
if (new Date() > new Date(pending.expiresAt)) { if (new Date() > new Date(pending.expires_at)) {
pendingVerifications = pendingVerifications.filter(p => p !== pending); await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
save();
return { status: "expired" }; return { status: "expired" };
} }
if (pending.attempts >= MAX_ATTEMPTS) { if (pending.attempts >= MAX_ATTEMPTS) {
pendingVerifications = pendingVerifications.filter(p => p !== pending); await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
save();
return { status: "max_attempts" }; return { status: "max_attempts" };
} }
pending.attempts++; await pool.query("UPDATE pending_verifications SET attempts = attempts + 1 WHERE email = $1", [cleanEmail]);
if (pending.code !== code) { if (pending.code !== code) {
save();
return { status: "invalid" }; return { status: "invalid" };
} }
// Success - remove pending await pool.query("DELETE FROM pending_verifications WHERE email = $1", [cleanEmail]);
pendingVerifications = pendingVerifications.filter(p => p !== pending);
save();
return { status: "ok" }; return { status: "ok" };
} }
export function isEmailVerified(email: string): boolean { export async function isEmailVerified(email: string): Promise<boolean> {
return verifications.some(v => v.email === email && v.verifiedAt !== null); const result = await pool.query(
"SELECT 1 FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1",
[email]
);
return result.rows.length > 0;
} }
export function getVerifiedApiKey(email: string): string | null { export async function getVerifiedApiKey(email: string): Promise<string | null> {
const v = verifications.find(v => v.email === email && v.verifiedAt !== null); const result = await pool.query(
return v?.apiKey ?? null; "SELECT api_key FROM verifications WHERE email = $1 AND verified_at IS NOT NULL LIMIT 1",
[email]
);
return result.rows[0]?.api_key ?? null;
} }