- Replace JSON file storage with PostgreSQL (pg package)
- Add db.ts service for connection pool and schema init
- Rewrite keys.ts, verification.ts, usage.ts for async PostgreSQL
- Update all routes for async function signatures
- Add migration script (scripts/migrate-to-postgres.mjs)
- Update docker-compose.yml with DATABASE_* env vars
- Change SLA from 99.9% to 99.5% in landing page
143 lines
4.8 KiB
JavaScript
#!/usr/bin/env node

/**
 * Migration script: JSON files → PostgreSQL
 * Run on the server where JSON data files exist.
 * Usage: DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs
 */
import pg from "pg";
import { readFileSync, existsSync } from "fs";

const { Pool } = pg;

// Connection settings come from DATABASE_* env vars; every field falls back
// to a local-dev default (`||` kept deliberately so empty strings also
// fall through to the defaults).
const env = process.env;
const pool = new Pool({
  host: env.DATABASE_HOST || "127.0.0.1",
  port: Number.parseInt(env.DATABASE_PORT || "5432", 10),
  database: env.DATABASE_NAME || "docfast",
  user: env.DATABASE_USER || "docfast",
  password: env.DATABASE_PASSWORD || "docfast",
});
/**
 * Creates the PostgreSQL schema (idempotent) and copies data out of the
 * legacy JSON files (keys.json, verifications.json, usage.json) into it.
 *
 * Safe to re-run: DDL uses IF NOT EXISTS, key/verification inserts use
 * ON CONFLICT DO NOTHING, and usage rows are upserted to the JSON values.
 * Missing source files are logged and skipped, not treated as errors.
 *
 * @returns {Promise<void>} resolves when all available files are migrated
 * @throws on any query failure; the client/pool are still released in finally
 */
async function migrate() {
  const client = await pool.connect();
  try {
    // Create tables
    await client.query(`
      CREATE TABLE IF NOT EXISTS api_keys (
        key TEXT PRIMARY KEY,
        tier TEXT NOT NULL DEFAULT 'free',
        email TEXT NOT NULL DEFAULT '',
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        stripe_customer_id TEXT
      );
      CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
      CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);

      CREATE TABLE IF NOT EXISTS verifications (
        id SERIAL PRIMARY KEY,
        email TEXT NOT NULL,
        token TEXT NOT NULL UNIQUE,
        api_key TEXT NOT NULL,
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        verified_at TIMESTAMPTZ
      );
      CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
      CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);

      CREATE TABLE IF NOT EXISTS pending_verifications (
        email TEXT PRIMARY KEY,
        code TEXT NOT NULL,
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        expires_at TIMESTAMPTZ NOT NULL,
        attempts INT NOT NULL DEFAULT 0
      );

      CREATE TABLE IF NOT EXISTS usage (
        key TEXT PRIMARY KEY,
        count INT NOT NULL DEFAULT 0,
        month_key TEXT NOT NULL
      );
    `);
    console.log("✅ Tables created");

    // Migrate keys.json
    const keysPath = "/opt/docfast/data/keys.json";
    if (existsSync(keysPath)) {
      const keysData = JSON.parse(readFileSync(keysPath, "utf-8"));
      const keys = keysData.keys || [];
      let keyCount = 0;
      for (const k of keys) {
        // Re-runs are idempotent: an existing key row wins (DO NOTHING).
        await client.query(
          `INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (key) DO NOTHING`,
          [k.key, k.tier, k.email || "", k.createdAt, k.stripeCustomerId || null]
        );
        keyCount++;
      }
      console.log(`✅ Migrated ${keyCount} API keys`);
    } else {
      // Try docker volume path
      console.log("⚠️ keys.json not found at", keysPath);
    }

    // Migrate verifications.json
    const verifPath = "/opt/docfast/data/verifications.json";
    if (existsSync(verifPath)) {
      const data = JSON.parse(readFileSync(verifPath, "utf-8"));
      // Two historical shapes: a bare array, or { verifications, pendingVerifications }.
      const verifications = Array.isArray(data) ? data : (data.verifications || []);
      const pending = data.pendingVerifications || [];

      let vCount = 0;
      for (const v of verifications) {
        await client.query(
          `INSERT INTO verifications (email, token, api_key, created_at, verified_at)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (token) DO NOTHING`,
          [v.email, v.token, v.apiKey, v.createdAt, v.verifiedAt || null]
        );
        vCount++;
      }
      console.log(`✅ Migrated ${vCount} verifications`);

      let pCount = 0;
      for (const p of pending) {
        await client.query(
          `INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts)
           VALUES ($1, $2, $3, $4, $5) ON CONFLICT (email) DO NOTHING`,
          [p.email, p.code, p.createdAt, p.expiresAt, p.attempts]
        );
        pCount++;
      }
      console.log(`✅ Migrated ${pCount} pending verifications`);
    } else {
      console.log("⚠️ verifications.json not found at", verifPath);
    }

    // Migrate usage.json — shaped as { [apiKey]: { count, monthKey } }.
    const usagePath = "/opt/docfast/data/usage.json";
    if (existsSync(usagePath)) {
      const usageData = JSON.parse(readFileSync(usagePath, "utf-8"));
      let uCount = 0;
      for (const [key, record] of Object.entries(usageData)) {
        // FIX: original read `const r = record as any;` — `as any` is
        // TypeScript-only syntax and is a SyntaxError when node runs this
        // plain .mjs file. Use the record directly; usage is upserted so
        // re-running always reflects the latest JSON counters.
        await client.query(
          `INSERT INTO usage (key, count, month_key)
           VALUES ($1, $2, $3) ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
          [key, record.count, record.monthKey]
        );
        uCount++;
      }
      console.log(`✅ Migrated ${uCount} usage records`);
    } else {
      console.log("⚠️ usage.json not found at", usagePath);
    }

    console.log("\n🎉 Migration complete!");
  } finally {
    // Always return the client and close the pool so the process can exit.
    client.release();
    await pool.end();
  }
}
|
|
|
|
// Entry point: run the migration and exit non-zero on any failure.
void (async () => {
  try {
    await migrate();
  } catch (err) {
    console.error("Migration failed:", err);
    process.exit(1);
  }
})();