feat: initial codebase v0.4.1
Some checks failed
Deploy to Staging / build-and-deploy (push) Failing after 9m44s
Some checks failed
Deploy to Staging / build-and-deploy (push) Failing after 9m44s
- Extract complete codebase from running staging pod - Add Dockerfile with multi-stage build for Node.js + Puppeteer - Configure CI/CD workflows for staging and production deployment - Include all source files, configs, and public assets
This commit is contained in:
commit
b58f634318
28 changed files with 5669 additions and 0 deletions
144
src/services/browser.ts
Normal file
144
src/services/browser.ts
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
import puppeteer, { Browser, Page } from "puppeteer";
|
||||
import logger from "./logger.js";
|
||||
|
||||
// Pool sizing: BROWSER_COUNT Chromium processes, each pre-opening PAGES_PER_BROWSER tabs.
const BROWSER_COUNT = parseInt(process.env.BROWSER_COUNT || "2", 10);
const PAGES_PER_BROWSER = parseInt(process.env.PAGES_PER_BROWSER || "4", 10);
// Recycle a browser after this many completed jobs...
const RESTART_AFTER = 500;
// ...or after this much wall-clock time (1 hour), whichever comes first.
const RESTART_AFTER_MS = 60 * 60 * 1000;

// One pooled browser process plus its bookkeeping.
interface BrowserInstance {
  browser: Browser;        // live Puppeteer browser
  availablePages: Page[];  // idle tabs ready to hand out
  jobCount: number;        // jobs completed since the last (re)start
  lastRestartTime: number; // epoch ms of the last (re)start
  restarting: boolean;     // true while scheduleRestart() is draining/relaunching
  id: number;              // stable index used in log lines
}

const instances: BrowserInstance[] = [];
// FIFO of callers waiting for a page when the pool is exhausted (see acquirePage).
const waitingQueue: Array<{ resolve: (v: { page: Page; instance: BrowserInstance }) => void }> = [];
// Rotates checkout across browsers so load spreads evenly.
let roundRobinIndex = 0;
|
||||
|
||||
export function getPoolStats() {
|
||||
const totalAvailable = instances.reduce((s, i) => s + i.availablePages.length, 0);
|
||||
return {
|
||||
browsers: instances.length,
|
||||
pagesPerBrowser: PAGES_PER_BROWSER,
|
||||
totalPages: instances.length * PAGES_PER_BROWSER,
|
||||
availablePages: totalAvailable,
|
||||
queueDepth: waitingQueue.length,
|
||||
totalJobs: instances.reduce((s, i) => s + i.jobCount, 0),
|
||||
};
|
||||
}
|
||||
|
||||
async function recyclePage(page: Page): Promise<void> {
|
||||
try {
|
||||
await page.goto("about:blank", { timeout: 5000 }).catch(() => {});
|
||||
} catch {}
|
||||
}
|
||||
|
||||
async function createPages(b: Browser, count: number): Promise<Page[]> {
|
||||
const pages: Page[] = [];
|
||||
for (let i = 0; i < count; i++) pages.push(await b.newPage());
|
||||
return pages;
|
||||
}
|
||||
|
||||
function pickInstance(): BrowserInstance | null {
|
||||
for (let i = 0; i < instances.length; i++) {
|
||||
const idx = (roundRobinIndex + i) % instances.length;
|
||||
const inst = instances[idx];
|
||||
if (inst.availablePages.length > 0 && !inst.restarting) {
|
||||
roundRobinIndex = (idx + 1) % instances.length;
|
||||
return inst;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function acquirePage(): Promise<{ page: Page; instance: BrowserInstance }> {
|
||||
for (const inst of instances) {
|
||||
if (!inst.restarting && (inst.jobCount >= RESTART_AFTER || Date.now() - inst.lastRestartTime >= RESTART_AFTER_MS)) {
|
||||
scheduleRestart(inst);
|
||||
}
|
||||
}
|
||||
|
||||
const inst = pickInstance();
|
||||
if (inst) {
|
||||
return { page: inst.availablePages.pop()!, instance: inst };
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(() => {
|
||||
const idx = waitingQueue.findIndex(w => w.resolve === resolve);
|
||||
if (idx >= 0) waitingQueue.splice(idx, 1);
|
||||
reject(new Error("QUEUE_FULL"));
|
||||
}, 30_000);
|
||||
waitingQueue.push({ resolve: (v) => { clearTimeout(timer); resolve(v); } });
|
||||
});
|
||||
}
|
||||
|
||||
export function releasePage(page: Page, inst: BrowserInstance): void {
|
||||
inst.jobCount++;
|
||||
const waiter = waitingQueue.shift();
|
||||
if (waiter) {
|
||||
recyclePage(page).then(() => waiter.resolve({ page, instance: inst })).catch(() => {
|
||||
waitingQueue.unshift(waiter);
|
||||
});
|
||||
return;
|
||||
}
|
||||
recyclePage(page).then(() => inst.availablePages.push(page)).catch(() => {});
|
||||
}
|
||||
|
||||
/**
 * Drain, close, and relaunch one pooled browser.
 * Triggered from acquirePage() when a browser exceeds its job or age limit.
 * Marks the instance `restarting` (so pickInstance() skips it), waits for all
 * pages to return to the pool (bounded at 30s), then closes and relaunches.
 */
async function scheduleRestart(inst: BrowserInstance): Promise<void> {
  if (inst.restarting) return; // already in progress — don't double-restart
  inst.restarting = true;
  logger.info(`Scheduling browser ${inst.id} restart`);

  // Wait for pages to drain (max 30s)
  await Promise.race([
    new Promise<void>(resolve => {
      // Poll every 100ms until every page has been released back.
      const check = () => {
        if (inst.availablePages.length === PAGES_PER_BROWSER) resolve();
        else setTimeout(check, 100);
      };
      check();
    }),
    // Hard cap: after 30s, proceed even if some pages are still checked out.
    new Promise<void>(r => setTimeout(r, 30000)),
  ]);

  // Close whatever made it back, then the browser itself (best-effort).
  for (const page of inst.availablePages) await page.close().catch(() => {});
  inst.availablePages.length = 0;
  try { await inst.browser.close(); } catch {}

  // NOTE(review): if this launch throws, `restarting` stays true and the
  // instance drops out of rotation permanently — confirm whether a
  // try/finally or retry is wanted here.
  inst.browser = await puppeteer.launch({
    headless: true,
    executablePath: process.env.PUPPETEER_EXECUTABLE_PATH || undefined,
    args: ["--no-sandbox", "--disable-setuid-sandbox", "--disable-gpu", "--disable-dev-shm-usage"],
  });
  inst.availablePages.push(...await createPages(inst.browser, PAGES_PER_BROWSER));
  inst.jobCount = 0;
  inst.lastRestartTime = Date.now();
  inst.restarting = false;
  logger.info(`Browser ${inst.id} restarted`);
}
|
||||
|
||||
export async function initBrowser(): Promise<void> {
|
||||
for (let i = 0; i < BROWSER_COUNT; i++) {
|
||||
const browser = await puppeteer.launch({
|
||||
headless: true,
|
||||
executablePath: process.env.PUPPETEER_EXECUTABLE_PATH || undefined,
|
||||
args: ["--no-sandbox", "--disable-setuid-sandbox", "--disable-gpu", "--disable-dev-shm-usage"],
|
||||
});
|
||||
const pages = await createPages(browser, PAGES_PER_BROWSER);
|
||||
instances.push({ browser, availablePages: pages, jobCount: 0, lastRestartTime: Date.now(), restarting: false, id: i });
|
||||
}
|
||||
logger.info(`Browser pool ready (${BROWSER_COUNT}×${PAGES_PER_BROWSER} = ${BROWSER_COUNT * PAGES_PER_BROWSER} pages)`);
|
||||
}
|
||||
|
||||
export async function closeBrowser(): Promise<void> {
|
||||
for (const inst of instances) {
|
||||
for (const page of inst.availablePages) await page.close().catch(() => {});
|
||||
await inst.browser.close().catch(() => {});
|
||||
}
|
||||
instances.length = 0;
|
||||
}
|
||||
100
src/services/db.ts
Normal file
100
src/services/db.ts
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
import pg from "pg";
|
||||
import logger from "./logger.js";
|
||||
const { Pool } = pg;
|
||||
|
||||
// Syscall errors and pg SQLSTATE codes that indicate a temporary
// connection-level failure worth retrying (admin/crash shutdown,
// cannot-connect-now, connection exceptions).
const TRANSIENT_ERRORS = new Set([
  "ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT",
  "57P01", "57P02", "57P03", "08006", "08003", "08001",
]);

// Shared connection pool; defaults point at the in-cluster pgbouncer service.
// NOTE(review): hard-coded fallback credentials ("docfast"/"docfast") should
// come from env only — confirm these defaults can never reach production.
const pool = new Pool({
  host: process.env.DATABASE_HOST || "main-db-pooler.postgres.svc",
  port: parseInt(process.env.DATABASE_PORT || "5432", 10),
  database: process.env.DATABASE_NAME || "snapapi",
  user: process.env.DATABASE_USER || "docfast",
  password: process.env.DATABASE_PASSWORD || "docfast",
  max: 10,
  idleTimeoutMillis: 10000,
  connectionTimeoutMillis: 5000,
  keepAlive: true,
  keepAliveInitialDelayMillis: 10000,
});

// Errors on idle clients would otherwise crash the process — log and continue.
pool.on("error", (err) => {
  logger.error({ err }, "Unexpected error on idle PostgreSQL client");
});
|
||||
|
||||
function isTransientError(err: any): boolean {
|
||||
if (!err) return false;
|
||||
const code = err.code || "";
|
||||
const msg = (err.message || "").toLowerCase();
|
||||
if (TRANSIENT_ERRORS.has(code)) return true;
|
||||
if (msg.includes("no available server") || msg.includes("connection terminated") || msg.includes("connection refused")) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function queryWithRetry(text: string, params?: any[], maxRetries = 3): Promise<pg.QueryResult> {
|
||||
let lastError: any;
|
||||
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||
let client: pg.PoolClient | undefined;
|
||||
try {
|
||||
client = await pool.connect();
|
||||
const result = await client.query(text, params);
|
||||
client.release();
|
||||
return result;
|
||||
} catch (err: any) {
|
||||
if (client) try { client.release(true); } catch (_) {}
|
||||
lastError = err;
|
||||
if (!isTransientError(err) || attempt === maxRetries) throw err;
|
||||
const delayMs = Math.min(1000 * Math.pow(2, attempt), 5000);
|
||||
logger.warn({ err: err.message, attempt: attempt + 1 }, "Transient DB error, retrying...");
|
||||
await new Promise(r => setTimeout(r, delayMs));
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
export async function connectWithRetry(maxRetries = 3): Promise<pg.PoolClient> {
|
||||
let lastError: any;
|
||||
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
const client = await pool.connect();
|
||||
await client.query("SELECT 1");
|
||||
return client;
|
||||
} catch (err: any) {
|
||||
lastError = err;
|
||||
if (!isTransientError(err) || attempt === maxRetries) throw err;
|
||||
await new Promise(r => setTimeout(r, Math.min(1000 * Math.pow(2, attempt), 5000)));
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
/**
 * Create the api_keys and usage tables (idempotent — IF NOT EXISTS).
 * Run once at startup, after connectWithRetry() confirms DB reachability.
 */
export async function initDatabase(): Promise<void> {
  const client = await connectWithRetry();
  try {
    await client.query(`
      CREATE TABLE IF NOT EXISTS api_keys (
        key TEXT PRIMARY KEY,
        tier TEXT NOT NULL DEFAULT 'free',
        email TEXT NOT NULL DEFAULT '',
        created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
        stripe_customer_id TEXT
      );
      CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);

      CREATE TABLE IF NOT EXISTS usage (
        key TEXT NOT NULL,
        month_key TEXT NOT NULL,
        count INT NOT NULL DEFAULT 0,
        PRIMARY KEY (key, month_key)
      );
    `);
    logger.info("PostgreSQL tables initialized");
  } finally {
    // Always hand the client back, even if DDL failed.
    client.release();
  }
}
|
||||
|
||||
export { pool };
|
||||
export default pool;
|
||||
207
src/services/keys.ts
Normal file
207
src/services/keys.ts
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
import { randomBytes } from "crypto";
|
||||
import logger from "./logger.js";
|
||||
import { queryWithRetry } from "./db.js";
|
||||
|
||||
// One provisioned API key (mirrors a row of the api_keys table).
export interface ApiKey {
  key: string;                                   // primary key, "snap_<48 hex chars>"
  tier: "free" | "starter" | "pro" | "business";
  email: string;
  createdAt: string;                             // ISO-8601 timestamp
  stripeCustomerId?: string;                     // set for Stripe-created paid keys
}

// Per-process cache of known keys; the DB stays the source of truth
// (cache-aside reads happen in fetchKeyFromDb).
let keysCache: ApiKey[] = [];
|
||||
|
||||
/**
 * Populate the in-memory cache from the api_keys table, then seed any keys
 * listed in the API_KEYS env var (comma-separated) as business-tier admin
 * keys. Called at startup; on DB failure the cache starts empty.
 */
export async function loadKeys(): Promise<void> {
  try {
    const result = await queryWithRetry("SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys");
    keysCache = result.rows.map(r => ({
      key: r.key,
      tier: r.tier,
      email: r.email,
      createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
      stripeCustomerId: r.stripe_customer_id || undefined,
    }));
  } catch (err) {
    logger.error({ err }, "Failed to load keys");
    keysCache = [];
  }

  // Seed keys from env
  const envKeys = process.env.API_KEYS?.split(",").map(k => k.trim()).filter(Boolean) || [];
  for (const k of envKeys) {
    if (!keysCache.find(e => e.key === k)) {
      const entry: ApiKey = { key: k, tier: "business", email: "admin@snapapi.dev", createdAt: new Date().toISOString() };
      keysCache.push(entry);
      // Best-effort persist; ON CONFLICT keeps this idempotent across replicas.
      await queryWithRetry(
        `INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4) ON CONFLICT (key) DO NOTHING`,
        [k, "business", entry.email, entry.createdAt]
      ).catch(() => {});
    }
  }
}
|
||||
|
||||
// Cache-aside: check DB if key not in memory (multi-replica support)
|
||||
async function fetchKeyFromDb(key: string): Promise<ApiKey | undefined> {
|
||||
try {
|
||||
const result = await queryWithRetry(
|
||||
"SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys WHERE key = $1",
|
||||
[key]
|
||||
);
|
||||
if (result.rows.length === 0) return undefined;
|
||||
const r = result.rows[0];
|
||||
const entry: ApiKey = {
|
||||
key: r.key,
|
||||
tier: r.tier,
|
||||
email: r.email,
|
||||
createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
|
||||
stripeCustomerId: r.stripe_customer_id || undefined,
|
||||
};
|
||||
// Add to local cache
|
||||
if (!keysCache.find(k => k.key === entry.key)) {
|
||||
keysCache.push(entry);
|
||||
}
|
||||
return entry;
|
||||
} catch (err) {
|
||||
logger.error({ err }, "Failed to fetch key from DB");
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export async function isValidKey(key: string): Promise<boolean> {
|
||||
if (keysCache.some(k => k.key === key)) return true;
|
||||
const fetched = await fetchKeyFromDb(key);
|
||||
return fetched !== undefined;
|
||||
}
|
||||
|
||||
export async function getKeyInfo(key: string): Promise<ApiKey | undefined> {
|
||||
const cached = keysCache.find(k => k.key === key);
|
||||
if (cached) return cached;
|
||||
return fetchKeyFromDb(key);
|
||||
}
|
||||
|
||||
function generateKey(): string {
|
||||
return `snap_${randomBytes(24).toString("hex")}`;
|
||||
}
|
||||
|
||||
/**
 * Create an API key for an email, or return the existing one for free tier.
 * Free keys are deduplicated per email — first against the local cache, then
 * against the DB (another replica may have created it). Paid tiers always
 * mint a new key. The row is inserted before the cache is updated.
 * @throws when the INSERT fails after retries.
 */
export async function createKey(email: string, tier: ApiKey["tier"] = "free"): Promise<ApiKey> {
  // For free tier, check DB too (another pod might have created it)
  if (tier === "free") {
    const existing = keysCache.find(k => k.email === email && k.tier === "free");
    if (existing) return existing;
    // Also check DB
    try {
      const result = await queryWithRetry(
        "SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys WHERE email = $1 AND tier = $2",
        [email, "free"]
      );
      if (result.rows.length > 0) {
        const r = result.rows[0];
        const entry: ApiKey = {
          key: r.key, tier: r.tier, email: r.email,
          createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
          stripeCustomerId: r.stripe_customer_id || undefined,
        };
        if (!keysCache.find(k => k.key === entry.key)) keysCache.push(entry);
        return entry;
      }
    } catch {} // best-effort dedupe: on DB error fall through and mint a new key
  }

  const entry: ApiKey = {
    key: generateKey(),
    tier,
    email,
    createdAt: new Date().toISOString(),
  };

  await queryWithRetry(
    "INSERT INTO api_keys (key, tier, email, created_at) VALUES ($1, $2, $3, $4)",
    [entry.key, entry.tier, entry.email, entry.createdAt]
  );
  keysCache.push(entry);
  return entry;
}
|
||||
|
||||
export function getAllKeys(): ApiKey[] {
|
||||
return [...keysCache];
|
||||
}
|
||||
|
||||
export function getTierLimit(tier: string): number {
|
||||
switch (tier) {
|
||||
case "free": return 100;
|
||||
case "starter": return 1000;
|
||||
case "pro": return 5000;
|
||||
case "business": return 25000;
|
||||
default: return 100;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create (or reuse) the API key tied to a Stripe customer.
 * When the customer already has a key, that key is returned — upgraded in
 * place (DB + cache) if the requested tier differs. Otherwise a new key row
 * is inserted with the customer id attached.
 * @throws when the INSERT fails after retries.
 */
export async function createPaidKey(email: string, tier: "starter" | "pro" | "business", stripeCustomerId?: string): Promise<ApiKey> {
  // Check if customer already has a key
  if (stripeCustomerId) {
    try {
      const result = await queryWithRetry(
        "SELECT key, tier, email, created_at, stripe_customer_id FROM api_keys WHERE stripe_customer_id = $1",
        [stripeCustomerId]
      );
      if (result.rows.length > 0) {
        const r = result.rows[0];
        const entry: ApiKey = {
          key: r.key, tier: r.tier, email: r.email,
          createdAt: r.created_at instanceof Date ? r.created_at.toISOString() : r.created_at,
          stripeCustomerId: r.stripe_customer_id || undefined,
        };
        // Update tier if upgrading
        if (entry.tier !== tier) {
          await queryWithRetry("UPDATE api_keys SET tier = $1 WHERE stripe_customer_id = $2", [tier, stripeCustomerId]);
          entry.tier = tier;
          const cached = keysCache.find(k => k.key === entry.key);
          if (cached) cached.tier = tier;
        }
        if (!keysCache.find(k => k.key === entry.key)) keysCache.push(entry);
        return entry;
      }
    } catch {} // on lookup failure, fall through and mint a fresh key
  }

  const entry: ApiKey = {
    key: generateKey(),
    tier,
    email,
    createdAt: new Date().toISOString(),
    stripeCustomerId,
  };

  await queryWithRetry(
    "INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id) VALUES ($1, $2, $3, $4, $5)",
    [entry.key, entry.tier, entry.email, entry.createdAt, entry.stripeCustomerId || null]
  );
  keysCache.push(entry);
  logger.info({ email, tier, stripeCustomerId }, "Created paid API key");
  return entry;
}
|
||||
|
||||
export async function downgradeByCustomer(customerId: string): Promise<void> {
|
||||
await queryWithRetry(
|
||||
"UPDATE api_keys SET tier = 'free', stripe_customer_id = NULL WHERE stripe_customer_id = $1",
|
||||
[customerId]
|
||||
);
|
||||
for (const k of keysCache) {
|
||||
if (k.stripeCustomerId === customerId) {
|
||||
k.tier = "free";
|
||||
k.stripeCustomerId = undefined;
|
||||
}
|
||||
}
|
||||
logger.info({ customerId }, "Downgraded customer to free");
|
||||
}
|
||||
|
||||
export async function updateEmailByCustomer(customerId: string, newEmail: string): Promise<void> {
|
||||
await queryWithRetry(
|
||||
"UPDATE api_keys SET email = $1 WHERE stripe_customer_id = $2",
|
||||
[newEmail, customerId]
|
||||
);
|
||||
for (const k of keysCache) {
|
||||
if (k.stripeCustomerId === customerId) k.email = newEmail;
|
||||
}
|
||||
}
|
||||
3
src/services/logger.ts
Normal file
3
src/services/logger.ts
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
import pino from "pino";

// Process-wide structured logger; verbosity is tuned via LOG_LEVEL (default "info").
const logger = pino({ level: process.env.LOG_LEVEL || "info" });

export default logger;
|
||||
74
src/services/screenshot.ts
Normal file
74
src/services/screenshot.ts
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import { Page } from "puppeteer";
|
||||
import { acquirePage, releasePage } from "./browser.js";
|
||||
import { validateUrl } from "./ssrf.js";
|
||||
import logger from "./logger.js";
|
||||
|
||||
// Options accepted by takeScreenshot; everything but `url` is optional.
export interface ScreenshotOptions {
  url: string;                       // page to capture (SSRF-validated first)
  format?: "png" | "jpeg" | "webp";  // default "png"
  width?: number;                    // viewport width, capped at MAX_WIDTH
  height?: number;                   // viewport height, capped at MAX_HEIGHT
  fullPage?: boolean;                // capture full scroll height (default false)
  quality?: number;                  // 1–100; ignored for png (lossless)
  waitForSelector?: string;          // wait for this selector before capture
  deviceScale?: number;              // devicePixelRatio, capped at 3
  delay?: number;                    // extra wait after load, ms, capped at 5000
}

// Raw image bytes plus the matching MIME type for the HTTP response.
export interface ScreenshotResult {
  buffer: Buffer;
  contentType: string;
}

const MAX_WIDTH = 3840;    // 4K viewport cap
const MAX_HEIGHT = 2160;
const TIMEOUT_MS = 30_000; // overall navigate+wait budget per capture
|
||||
|
||||
export async function takeScreenshot(opts: ScreenshotOptions): Promise<ScreenshotResult> {
|
||||
// Validate URL for SSRF
|
||||
await validateUrl(opts.url);
|
||||
|
||||
const format = opts.format || "png";
|
||||
const width = Math.min(opts.width || 1280, MAX_WIDTH);
|
||||
const height = Math.min(opts.height || 800, MAX_HEIGHT);
|
||||
const fullPage = opts.fullPage ?? false;
|
||||
const quality = format === "png" ? undefined : Math.min(Math.max(opts.quality || 80, 1), 100);
|
||||
const deviceScale = Math.min(opts.deviceScale || 1, 3);
|
||||
|
||||
const { page, instance } = await acquirePage();
|
||||
|
||||
try {
|
||||
await page.setViewport({ width, height, deviceScaleFactor: deviceScale });
|
||||
|
||||
await Promise.race([
|
||||
(async () => {
|
||||
await page.goto(opts.url, { waitUntil: "networkidle2", timeout: 20_000 });
|
||||
|
||||
if (opts.waitForSelector) {
|
||||
await page.waitForSelector(opts.waitForSelector, { timeout: 10_000 });
|
||||
}
|
||||
|
||||
if (opts.delay && opts.delay > 0) {
|
||||
await new Promise(r => setTimeout(r, Math.min(opts.delay!, 5000)));
|
||||
}
|
||||
})(),
|
||||
new Promise<never>((_, reject) => setTimeout(() => reject(new Error("SCREENSHOT_TIMEOUT")), TIMEOUT_MS)),
|
||||
]);
|
||||
|
||||
const screenshotOpts: any = {
|
||||
type: format === "webp" ? "webp" : format,
|
||||
fullPage,
|
||||
encoding: "binary",
|
||||
};
|
||||
if (quality !== undefined) screenshotOpts.quality = quality;
|
||||
|
||||
const result = await page.screenshot(screenshotOpts);
|
||||
const buffer = Buffer.from(result as unknown as ArrayBuffer);
|
||||
|
||||
const contentType = format === "png" ? "image/png" : format === "jpeg" ? "image/jpeg" : "image/webp";
|
||||
|
||||
return { buffer, contentType };
|
||||
} finally {
|
||||
releasePage(page, instance);
|
||||
}
|
||||
}
|
||||
65
src/services/ssrf.ts
Normal file
65
src/services/ssrf.ts
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
import { lookup } from "dns/promises";
|
||||
import logger from "./logger.js";
|
||||
|
||||
// Block private, loopback, link-local, metadata IPs
|
||||
const BLOCKED_RANGES = [
|
||||
/^127\./,
|
||||
/^10\./,
|
||||
/^172\.(1[6-9]|2[0-9]|3[01])\./,
|
||||
/^192\.168\./,
|
||||
/^169\.254\./,
|
||||
/^0\./,
|
||||
/^::1$/,
|
||||
/^fe80:/i,
|
||||
/^fc00:/i,
|
||||
/^fd00:/i,
|
||||
];
|
||||
|
||||
// Hostname patterns that must never be fetched: Kubernetes service DNS,
// cluster-internal suffixes, cloud-internal domains, and localhost.
const BLOCKED_HOSTS = [
  /\.svc$/,
  /\.svc\./,
  /\.cluster\.local$/,
  /\.internal$/,   // also covers metadata.google.internal
  /^localhost$/,
  /^kubernetes/,
];
|
||||
|
||||
export async function validateUrl(urlStr: string): Promise<{ hostname: string; resolvedIp: string }> {
|
||||
let parsed: URL;
|
||||
try {
|
||||
parsed = new URL(urlStr);
|
||||
} catch {
|
||||
throw new Error("Invalid URL");
|
||||
}
|
||||
|
||||
if (!["http:", "https:"].includes(parsed.protocol)) {
|
||||
throw new Error("Only HTTP and HTTPS URLs are allowed");
|
||||
}
|
||||
|
||||
const hostname = parsed.hostname;
|
||||
|
||||
// Check blocked hostnames
|
||||
for (const pattern of BLOCKED_HOSTS) {
|
||||
if (pattern.test(hostname)) {
|
||||
throw new Error("URL hostname is not allowed");
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve DNS and check IP
|
||||
let ip: string;
|
||||
try {
|
||||
const result = await lookup(hostname);
|
||||
ip = result.address;
|
||||
} catch {
|
||||
throw new Error("Could not resolve hostname");
|
||||
}
|
||||
|
||||
for (const pattern of BLOCKED_RANGES) {
|
||||
if (pattern.test(ip)) {
|
||||
logger.warn({ hostname, ip }, "SSRF attempt blocked");
|
||||
throw new Error("URL resolves to a blocked IP range");
|
||||
}
|
||||
}
|
||||
|
||||
return { hostname, resolvedIp: ip };
|
||||
}
|
||||
62
src/services/watermark.ts
Normal file
62
src/services/watermark.ts
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
/**
 * Overlays a diagonal "upgrade" text watermark on a screenshot buffer.
 *
 * Implementation choice: rather than adding a native image dependency
 * (e.g. sharp), this reuses the existing Puppeteer pool — it renders an HTML
 * page containing the screenshot as an <img> plus a CSS text overlay, then
 * screenshots that page. Costs a second render per free-tier capture, but
 * keeps the image pipeline free of native deps.
 *
 * NOTE(review): the data URL is always labelled image/png even though
 * callers may pass jpeg/webp buffers — confirm the browser sniffs the real
 * content type here.
 * NOTE(review): the output is always re-encoded as PNG regardless of the
 * input format; confirm callers expect that.
 */
import { acquirePage, releasePage } from "./browser.js";

export async function addWatermark(imageBuffer: Buffer, width: number, height: number): Promise<Buffer> {
  const { page, instance } = await acquirePage();
  try {
    // Inline the image so no network fetch is needed inside the render page.
    const b64 = imageBuffer.toString("base64");
    const dataUrl = `data:image/png;base64,${b64}`;

    // Viewport matches the source image so the re-capture is pixel-for-pixel.
    await page.setViewport({ width, height });

    // Render the image with a watermark overlay
    await page.setContent(`
      <!DOCTYPE html>
      <html><head><style>
      * { margin: 0; padding: 0; }
      body { width: ${width}px; height: ${height}px; position: relative; overflow: hidden; }
      img { width: 100%; height: 100%; object-fit: cover; display: block; }
      .watermark {
      position: absolute; top: 0; left: 0; right: 0; bottom: 0;
      display: flex; align-items: center; justify-content: center;
      pointer-events: none;
      }
      .watermark-text {
      font-family: Arial, Helvetica, sans-serif;
      font-size: ${Math.max(width / 20, 24)}px;
      font-weight: 900;
      color: rgba(255, 255, 255, 0.35);
      text-shadow: 2px 2px 8px rgba(0, 0, 0, 0.5);
      transform: rotate(-30deg);
      white-space: nowrap;
      letter-spacing: 2px;
      user-select: none;
      text-transform: uppercase;
      }
      </style></head><body>
      <img src="${dataUrl}">
      <div class="watermark">
      <div class="watermark-text">snapapi.eu — upgrade for clean screenshots</div>
      </div>
      </body></html>
    `, { waitUntil: "load" });

    const result = await page.screenshot({ type: "png", encoding: "binary" });
    return Buffer.from(result as unknown as ArrayBuffer);
  } finally {
    // Always hand the pooled page back, even when rendering fails.
    releasePage(page, instance);
  }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue