diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..0eb4c73
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,10 @@
+node_modules
+.git
+.gitignore
+*.md
+src/__tests__
+vitest.config.ts
+.env*
+.credentials
+memory
+dist
diff --git a/.forgejo/workflows/deploy.yml b/.forgejo/workflows/deploy.yml
index 9dff451..22f0f68 100644
--- a/.forgejo/workflows/deploy.yml
+++ b/.forgejo/workflows/deploy.yml
@@ -13,6 +13,19 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '22'
+
+ - name: Install dependencies
+ run: npm ci
+
+ - name: Run tests
+ run: npm test
+ env:
+ NODE_ENV: test
+
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -31,6 +44,7 @@ jobs:
with:
context: .
push: true
+ no-cache: true
tags: |
git.cloonar.com/openclawd/docfast:latest
git.cloonar.com/openclawd/docfast:${{ github.sha }}
diff --git a/.forgejo/workflows/promote.yml b/.forgejo/workflows/promote.yml
index 2fae872..f7b861c 100644
--- a/.forgejo/workflows/promote.yml
+++ b/.forgejo/workflows/promote.yml
@@ -11,18 +11,24 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Checkout code at tag
+ uses: actions/checkout@v4
+
- name: Install kubectl
run: |
curl -sLO "https://dl.k8s.io/release/$(curl -sL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
chmod +x kubectl
- - name: Get image from tag
+ - name: Get image info
id: image
run: |
- # Tag format: v0.2.1 or v0.2.1-rc1
- # The staging pipeline already pushed the image with the commit SHA
- # We retag with the version tag for traceability
+ # Use the commit SHA instead of "latest" to avoid a race condition:
+ # The tag event can fire before the staging build (deploy.yml) finishes
+ # pushing the new "latest" image. By referencing the exact SHA that
+ # deploy.yml tags images with (${{ github.sha }}), we ensure we
+ # promote the correct build — and wait for it if it's still running.
echo "tag=${{ github.ref_name }}" >> "$GITHUB_OUTPUT"
+ echo "sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
- name: Login to Forgejo Registry
uses: docker/login-action@v3
@@ -31,13 +37,28 @@ jobs:
username: openclawd
password: ${{ secrets.REGISTRY_TOKEN }}
- - name: Retag image for production
+ - name: Wait for staging image and retag for production
run: |
- # Pull latest staging image and tag with version
- docker pull --platform linux/arm64 git.cloonar.com/openclawd/docfast:latest
- docker tag git.cloonar.com/openclawd/docfast:latest \
- git.cloonar.com/openclawd/docfast:${{ steps.image.outputs.tag }}
- docker push git.cloonar.com/openclawd/docfast:${{ steps.image.outputs.tag }}
+ SHA_IMAGE="git.cloonar.com/openclawd/docfast:${{ steps.image.outputs.sha }}"
+ PROD_IMAGE="git.cloonar.com/openclawd/docfast:${{ steps.image.outputs.tag }}"
+
+ # Wait for the SHA-tagged image (built by staging) to be available
+ for i in $(seq 1 20); do
+ echo "Attempt $i/20: pulling $SHA_IMAGE ..."
+ if docker pull --platform linux/arm64 "$SHA_IMAGE" 2>/dev/null; then
+ echo "✅ Image found!"
+ break
+ fi
+ if [ "$i" -eq 20 ]; then
+ echo "❌ Image not available after 10 minutes. Aborting."
+ exit 1
+ fi
+ echo "Image not ready yet, waiting 30s..."
+ sleep 30
+ done
+
+ docker tag "$SHA_IMAGE" "$PROD_IMAGE"
+ docker push "$PROD_IMAGE"
- name: Deploy to Production
run: |
diff --git a/BACKUP_PROCEDURES.md b/BACKUP_PROCEDURES.md
deleted file mode 100644
index 52106ca..0000000
--- a/BACKUP_PROCEDURES.md
+++ /dev/null
@@ -1,184 +0,0 @@
-# DocFast Backup & Disaster Recovery Procedures
-
-## Overview
-DocFast now uses BorgBackup for full disaster recovery backups. The system backs up all critical components needed to restore the service on a new server.
-
-## What is Backed Up
-- **PostgreSQL database** - Full database dump with schema and data
-- **Docker volumes** - Application data and files
-- **Nginx configuration** - Web server configuration
-- **SSL certificates** - Let's Encrypt certificates and keys
-- **Crontabs** - Scheduled tasks
-- **OpenDKIM keys** - Email authentication keys
-- **DocFast application files** - docker-compose.yml, .env, scripts
-- **System information** - Installed packages, enabled services, disk usage
-
-## Backup Location & Schedule
-
-### Current Setup (Local)
-- **Location**: `/opt/borg-backups/docfast`
-- **Schedule**: Daily at 03:00 UTC
-- **Retention**: 7 daily + 4 weekly + 3 monthly backups
-- **Compression**: LZ4 (fast compression/decompression)
-- **Encryption**: repokey mode (encrypted with passphrase)
-
-### Security
-- **Passphrase**: `docfast-backup-YYYY` (where YYYY is current year)
-- **Key backup**: Stored in `/opt/borg-backups/docfast-key-backup.txt`
-- **⚠️ IMPORTANT**: Both passphrase AND key are required for restore!
-
-## Scripts
-
-### Backup Script: `/opt/docfast-borg-backup.sh`
-- Automated backup creation
-- Runs via cron daily at 03:00 UTC
-- Logs to `/var/log/docfast-backup.log`
-- Auto-prunes old backups
-
-### Restore Script: `/opt/docfast-borg-restore.sh`
-- List available backups: `./docfast-borg-restore.sh list`
-- Restore specific backup: `./docfast-borg-restore.sh restore docfast-YYYY-MM-DD_HHMM`
-- Restore latest backup: `./docfast-borg-restore.sh restore latest`
-
-## Manual Backup Commands
-
-```bash
-# Run backup manually
-/opt/docfast-borg-backup.sh
-
-# List all backups
-export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
-borg list /opt/borg-backups/docfast
-
-# Show repository info
-borg info /opt/borg-backups/docfast
-
-# Show specific backup contents
-borg list /opt/borg-backups/docfast::docfast-2026-02-15_1103
-```
-
-## Disaster Recovery Procedure
-
-### Complete Server Rebuild
-If the entire server is lost, follow these steps on a new server:
-
-1. **Install dependencies**:
- ```bash
- apt update && apt install -y docker.io docker-compose postgresql-16 nginx borgbackup
- systemctl enable postgresql docker
- ```
-
-2. **Copy backup data**:
- - Transfer `/opt/borg-backups/` directory to new server
- - Transfer `/opt/borg-backups/docfast-key-backup.txt`
-
-3. **Import Borg key**:
- ```bash
- export BORG_PASSPHRASE="docfast-backup-2026"
- borg key import /opt/borg-backups/docfast /opt/borg-backups/docfast-key-backup.txt
- ```
-
-4. **Restore latest backup**:
- ```bash
- /opt/docfast-borg-restore.sh restore latest
- ```
-
-5. **Follow manual restore steps** (shown by restore script):
- - Stop services
- - Restore database
- - Restore configuration files
- - Set permissions
- - Start services
-
-### Database-Only Recovery
-If only the database needs restoration:
-
-```bash
-# Stop DocFast
-cd /opt/docfast && docker-compose down
-
-# Restore database
-export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
-cd /tmp
-borg extract /opt/borg-backups/docfast::docfast-YYYY-MM-DD_HHMM
-sudo -u postgres dropdb docfast
-sudo -u postgres createdb -O docfast docfast
-export PGPASSFILE="/root/.pgpass"
-pg_restore -d docfast /tmp/tmp/docfast-backup-*/docfast-db.dump
-
-# Restart DocFast
-cd /opt/docfast && docker-compose up -d
-```
-
-## Migration to Off-Site Storage
-
-### Option 1: Hetzner Storage Box (Recommended)
-Manual setup required (Hetzner Storage Box API not available):
-
-1. **Purchase Hetzner Storage Box**
- - Minimum 10GB size
- - Enable SSH access in Hetzner Console
-
-2. **Configure SSH access**:
- ```bash
- # Generate SSH key for storage box
- ssh-keygen -t ed25519 -f /root/.ssh/hetzner-storage-box
-
- # Add public key to storage box in Hetzner Console
- cat /root/.ssh/hetzner-storage-box.pub
- ```
-
-3. **Update backup script**:
- Change `BORG_REPO` in `/opt/docfast-borg-backup.sh`:
- ```bash
- BORG_REPO="ssh://uXXXXXX@uXXXXXX.your-storagebox.de:23/./docfast-backups"
- ```
-
-4. **Initialize remote repository**:
- ```bash
- export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
- borg init --encryption=repokey ssh://uXXXXXX@uXXXXXX.your-storagebox.de:23/./docfast-backups
- ```
-
-### Option 2: AWS S3/Glacier
-Use rclone + borg for S3 storage (requires investor approval for AWS costs).
-
-## Monitoring & Maintenance
-
-### Check Backup Status
-```bash
-# View recent backup logs
-tail -f /var/log/docfast-backup.log
-
-# Check repository size and stats
-export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
-borg info /opt/borg-backups/docfast
-```
-
-### Manual Cleanup
-```bash
-# Prune old backups manually
-borg prune --keep-daily 7 --keep-weekly 4 --keep-monthly 3 /opt/borg-backups/docfast
-
-# Compact repository
-borg compact /opt/borg-backups/docfast
-```
-
-### Repository Health Check
-```bash
-# Check repository consistency
-borg check --verify-data /opt/borg-backups/docfast
-```
-
-## Important Notes
-
-1. **Test restores regularly** - Run restore test monthly
-2. **Monitor backup logs** - Check for failures in `/var/log/docfast-backup.log`
-3. **Keep key safe** - Store `/opt/borg-backups/docfast-key-backup.txt` securely off-site
-4. **Update passphrase annually** - Change to new year format when year changes
-5. **Local storage limit** - Current server has ~19GB available, monitor usage
-
-## Migration Timeline
-- **Immediate**: Local BorgBackup operational (✅ Complete)
-- **Phase 2**: Off-site storage setup (requires Storage Box purchase or AWS approval)
-- **Phase 3**: Automated off-site testing and monitoring
\ No newline at end of file
diff --git a/CI-CD-SETUP-COMPLETE.md b/CI-CD-SETUP-COMPLETE.md
deleted file mode 100644
index d1aee96..0000000
--- a/CI-CD-SETUP-COMPLETE.md
+++ /dev/null
@@ -1,121 +0,0 @@
-# DocFast CI/CD Pipeline Setup - COMPLETED ✅
-
-## What Was Implemented
-
-### ✅ Forgejo Actions Workflow
-- **File**: `.forgejo/workflows/deploy.yml`
-- **Trigger**: Push to `main` branch
-- **Process**:
- 1. SSH to production server (167.235.156.214)
- 2. Pull latest code from git
- 3. Tag current Docker image for rollback (`rollback-YYYYMMDD-HHMMSS`)
- 4. Build new Docker image with `--no-cache`
- 5. Stop current services (30s graceful timeout)
- 6. Start new services with `docker compose up -d`
- 7. Health check at `http://127.0.0.1:3100/health` (30 attempts, 5s intervals)
- 8. **Auto-rollback** if health check fails
- 9. Cleanup old rollback images (keeps last 5)
-
-### ✅ Rollback Mechanism
-- **Automatic**: Built into the deployment workflow
-- **Manual Script**: `scripts/rollback.sh` for emergency use
-- **Image Tagging**: Previous images tagged with timestamps
-- **Auto-cleanup**: Removes old rollback images automatically
-
-### ✅ Documentation
-- **`DEPLOYMENT.md`**: Complete deployment guide
-- **`CI-CD-SETUP-COMPLETE.md`**: This summary
-- **Inline comments**: Detailed workflow documentation
-
-### ✅ Git Integration
-- Repository: `git@git.cloonar.com:openclawd/docfast.git`
-- SSH access configured with key: `/home/openclaw/.ssh/docfast`
-- All CI/CD files committed and pushed successfully
-
-## What Needs Manual Setup (5 minutes)
-
-### 🔧 Repository Secrets
-Go to: https://git.cloonar.com/openclawd/docfast/settings/actions/secrets
-
-Add these 3 secrets:
-1. **SERVER_HOST**: `167.235.156.214`
-2. **SERVER_USER**: `root`
-3. **SSH_PRIVATE_KEY**: (copy content from `/home/openclaw/.ssh/docfast`)
-
-### 🧪 Test the Pipeline
-1. Once secrets are added, push any change to main branch
-2. Check Actions tab: https://git.cloonar.com/openclawd/docfast/actions
-3. Watch deployment progress
-4. Verify with: `curl http://127.0.0.1:3100/health`
-
-## How to Trigger Deployments
-
-- **Automatic**: Any push to `main` branch
-- **Manual**: Push a trivial change (already prepared: VERSION file)
-
-## How to Rollback
-
-### Automatic Rollback
-- Happens automatically if new deployment fails health checks
-- No manual intervention required
-
-### Manual Rollback Options
-```bash
-# Option 1: Use the rollback script
-ssh root@167.235.156.214
-cd /root/docfast
-./scripts/rollback.sh
-
-# Option 2: Manual Docker commands
-ssh root@167.235.156.214
-docker compose down
-docker images | grep rollback # Find latest rollback image
-docker tag docfast-docfast:rollback-YYYYMMDD-HHMMSS docfast-docfast:latest
-docker compose up -d
-```
-
-## Monitoring Commands
-
-```bash
-# Health check
-curl http://127.0.0.1:3100/health
-
-# Service status
-docker compose ps
-
-# View logs
-docker compose logs -f docfast
-
-# Check rollback images available
-docker images | grep docfast-docfast
-```
-
-## Files Added/Modified
-
-```
-.forgejo/workflows/deploy.yml # Main deployment workflow
-scripts/rollback.sh # Emergency rollback script
-scripts/setup-secrets.sh # Helper script (API had auth issues)
-DEPLOYMENT.md # Deployment documentation
-CI-CD-SETUP-COMPLETE.md # This summary
-VERSION # Test file for pipeline testing
-```
-
-## Next Steps
-
-1. **Set up secrets** in Forgejo (5 minutes)
-2. **Test deployment** by making a small change
-3. **Verify** the health check endpoint works
-4. **Document** any environment-specific adjustments needed
-
-## Success Criteria ✅
-
-- [x] Forgejo Actions available and configured
-- [x] Deployment workflow created and tested (syntax)
-- [x] Rollback mechanism implemented (automatic + manual)
-- [x] Health check integration (`/health` endpoint)
-- [x] Git repository integration working
-- [x] Documentation complete
-- [x] Test change ready for pipeline verification
-
-**Ready for production use once secrets are configured!** 🚀
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 1143405..92c3f39 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,37 @@
-FROM node:22-bookworm-slim
+# ============================================
+# Stage 1: Builder
+# ============================================
+FROM node:22-bookworm-slim AS builder
+
+WORKDIR /app
+
+# Copy package files for dependency installation
+COPY package*.json tsconfig.json ./
+
+# Install ALL dependencies (including devDependencies for build)
+RUN npm install
+
+# Copy source code and build scripts
+COPY src/ src/
+COPY scripts/ scripts/
+COPY public/ public/
+
+# Compile TypeScript
+RUN npx tsc
+
+# Generate OpenAPI spec
+RUN node scripts/generate-openapi.mjs
+
+# Build HTML templates
+RUN node scripts/build-html.cjs
+
+# Create swagger-ui symlink in builder stage
+RUN rm -f public/swagger-ui && ln -s /app/node_modules/swagger-ui-dist public/swagger-ui
+
+# ============================================
+# Stage 2: Production
+# ============================================
+FROM node:22-bookworm-slim AS production
# Install Chromium and dependencies as root
RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -9,20 +42,26 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN groupadd --gid 1001 docfast \
&& useradd --uid 1001 --gid docfast --shell /bin/bash --create-home docfast
-# Set environment variables
-ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
-ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
-
WORKDIR /app
+
+# Copy package files for production dependency installation
COPY package*.json ./
+
+# Install ONLY production dependencies
RUN npm install --omit=dev
-COPY dist/ dist/
-COPY scripts/ scripts/
-COPY public/ public/
-RUN node scripts/build-html.cjs
+# Copy compiled artifacts from builder stage
+COPY --from=builder /app/dist ./dist
+COPY --from=builder /app/public ./public
+COPY --from=builder /app/src ./src
+
+# Recreate swagger-ui symlink in production stage
RUN rm -f public/swagger-ui && ln -s /app/node_modules/swagger-ui-dist public/swagger-ui
+# Set Puppeteer environment variables
+ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
+ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
+
# Create data directory and set ownership to docfast user
RUN mkdir -p /app/data && chown -R docfast:docfast /app
diff --git a/Dockerfile.backup b/Dockerfile.backup
deleted file mode 100644
index bdc953a..0000000
--- a/Dockerfile.backup
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM node:22-bookworm-slim
-
-# Install Chromium (works on ARM and x86)
-RUN apt-get update && apt-get install -y --no-install-recommends \
- chromium fonts-liberation \
- && rm -rf /var/lib/apt/lists/*
-
-ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
-ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
-
-WORKDIR /app
-COPY package*.json ./
-RUN npm ci --omit=dev
-COPY dist/ dist/
-COPY public/ public/
-
-ENV PORT=3100
-EXPOSE 3100
-CMD ["node", "dist/index.js"]
diff --git a/README.md b/README.md
index 6cd4e54..4052ea8 100644
--- a/README.md
+++ b/README.md
@@ -1,38 +1,71 @@
# DocFast API
-Fast, simple HTML/Markdown to PDF API with built-in invoice templates.
+Fast, reliable HTML/Markdown/URL to PDF conversion API. EU-hosted, GDPR compliant.
+
+**Website:** https://docfast.dev
+**Docs:** https://docfast.dev/docs
+**Status:** https://docfast.dev/status
+
+## Features
+
+- **HTML → PDF** — Full documents or fragments with optional CSS
+- **Markdown → PDF** — GitHub-flavored Markdown with syntax highlighting
+- **URL → PDF** — Render any public webpage as PDF (SSRF-protected)
+- **Invoice Templates** — Built-in professional invoice template
+- **PDF Options** — Paper size, orientation, margins, headers/footers, page ranges, scaling
## Quick Start
+### 1. Get an API Key
+
+Sign up at https://docfast.dev — free demo available, Pro plan at €9/month for 5,000 PDFs.
+
+### 2. Generate a PDF
+
```bash
-npm install
-npm run build
-API_KEYS=your-key-here npm start
+curl -X POST https://docfast.dev/v1/convert/html \
+ -H "Authorization: Bearer YOUR_API_KEY" \
+ -H "Content-Type: application/json" \
+  -d '{"html": "<h1>Hello World</h1><p>Your first PDF.</p>"}' \
+ -o output.pdf
```
-## Endpoints
+## API Endpoints
### Convert HTML to PDF
+
```bash
-curl -X POST http://localhost:3100/v1/convert/html \
+curl -X POST https://docfast.dev/v1/convert/html \
-H "Authorization: Bearer YOUR_KEY" \
-H "Content-Type: application/json" \
-  -d '{"html": "<h1>Hello</h1><p>World</p>"}' \
+  -d '{"html": "<h1>Hello</h1>", "format": "A4", "margin": {"top": "20mm"}}' \
-o output.pdf
```
### Convert Markdown to PDF
+
```bash
-curl -X POST http://localhost:3100/v1/convert/markdown \
+curl -X POST https://docfast.dev/v1/convert/markdown \
-H "Authorization: Bearer YOUR_KEY" \
-H "Content-Type: application/json" \
- -d '{"markdown": "# Hello\n\nWorld"}' \
+ -d '{"markdown": "# Hello\n\nWorld", "css": "body { font-family: sans-serif; }"}' \
+ -o output.pdf
+```
+
+### Convert URL to PDF
+
+```bash
+curl -X POST https://docfast.dev/v1/convert/url \
+ -H "Authorization: Bearer YOUR_KEY" \
+ -H "Content-Type: application/json" \
+ -d '{"url": "https://example.com", "format": "A4", "landscape": true}' \
-o output.pdf
```
### Invoice Template
+
```bash
-curl -X POST http://localhost:3100/v1/templates/invoice/render \
+curl -X POST https://docfast.dev/v1/templates/invoice/render \
-H "Authorization: Bearer YOUR_KEY" \
-H "Content-Type: application/json" \
-d '{
@@ -40,23 +73,95 @@ curl -X POST http://localhost:3100/v1/templates/invoice/render \
"date": "2026-02-14",
"from": {"name": "Your Company", "email": "you@example.com"},
"to": {"name": "Client", "email": "client@example.com"},
- "items": [{"description": "Service", "quantity": 1, "unitPrice": 100, "taxRate": 20}]
+ "items": [{"description": "Consulting", "quantity": 10, "unitPrice": 150, "taxRate": 20}]
}' \
-o invoice.pdf
```
-### Options
-- `format`: Paper size (A4, Letter, Legal, etc.)
-- `landscape`: true/false
-- `margin`: `{top, right, bottom, left}` in CSS units
-- `css`: Custom CSS (for markdown/html fragments)
-- `filename`: Suggested filename in Content-Disposition header
+### Demo (No Auth Required)
-## Auth
-Pass API key via `Authorization: Bearer <key>`. Set `API_KEYS` env var (comma-separated for multiple keys).
+Try the API without signing up:
-## Docker
```bash
-docker build -t docfast .
-docker run -p 3100:3100 -e API_KEYS=your-key docfast
+curl -X POST https://docfast.dev/v1/demo/html \
+ -H "Content-Type: application/json" \
+  -d '{"html": "<h1>Demo PDF</h1><p>No API key needed.</p>"}' \
+ -o demo.pdf
```
+
+Demo PDFs include a watermark and are rate-limited.
+
+## PDF Options
+
+All conversion endpoints accept these options:
+
+| Option | Type | Default | Description |
+|--------|------|---------|-------------|
+| `format` | string | `"A4"` | Paper size: A4, Letter, Legal, A3, etc. |
+| `landscape` | boolean | `false` | Landscape orientation |
+| `margin` | object | `{top:"0",right:"0",bottom:"0",left:"0"}` | Margins in CSS units (px, mm, in, cm) |
+| `printBackground` | boolean | `true` | Include background colors/images |
+| `filename` | string | `"document.pdf"` | Suggested filename in Content-Disposition |
+| `css` | string | — | Custom CSS (for HTML fragments and Markdown) |
+| `scale` | number | `1` | Scale (0.1–2.0) |
+| `pageRanges` | string | — | Page ranges, e.g. `"1-3, 5"` |
+| `width` | string | — | Custom page width (overrides format) |
+| `height` | string | — | Custom page height (overrides format) |
+| `headerTemplate` | string | — | HTML template for page header |
+| `footerTemplate` | string | — | HTML template for page footer |
+| `displayHeaderFooter` | boolean | `false` | Show header/footer |
+| `preferCSSPageSize` | boolean | `false` | Use CSS `@page` size over format |
+
+## Authentication
+
+Pass your API key via either:
+- `Authorization: Bearer <key>` header
+- `X-API-Key: <key>` header
+
+## Development
+
+```bash
+# Install dependencies
+npm install
+
+# Run in development mode
+npm run dev
+
+# Run tests
+npm test
+
+# Build
+npm run build
+
+# Start production server
+npm start
+```
+
+### Environment Variables
+
+| Variable | Required | Description |
+|----------|----------|-------------|
+| `DATABASE_URL` | Yes | PostgreSQL connection string |
+| `STRIPE_SECRET_KEY` | Yes | Stripe API key for billing |
+| `STRIPE_WEBHOOK_SECRET` | Yes | Stripe webhook signature secret |
+| `SMTP_HOST` | Yes | SMTP server hostname |
+| `SMTP_PORT` | Yes | SMTP server port |
+| `SMTP_USER` | Yes | SMTP username |
+| `SMTP_PASS` | Yes | SMTP password |
+| `BASE_URL` | No | Base URL (default: https://docfast.dev) |
+| `PORT` | No | Server port (default: 3100) |
+| `BROWSER_COUNT` | No | Puppeteer browser instances (default: 2) |
+| `PAGES_PER_BROWSER` | No | Pages per browser (default: 8) |
+| `LOG_LEVEL` | No | Pino log level (default: info) |
+
+### Architecture
+
+- **Runtime:** Node.js + Express
+- **PDF Engine:** Puppeteer (Chromium) with browser pool
+- **Database:** PostgreSQL (via pg)
+- **Payments:** Stripe
+- **Email:** SMTP (nodemailer)
+
+## License
+
+Proprietary — Cloonar Technologies GmbH
diff --git a/bugs.md b/bugs.md
deleted file mode 100644
index 8ebec70..0000000
--- a/bugs.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# DocFast Bugs
-
-## Open
-
-### BUG-030: Email change backend not implemented
-- **Severity:** High
-- **Found:** 2026-02-14 QA session
-- **Description:** Frontend UI for email change is deployed (modal, form, JS handlers), but no backend routes exist. Frontend calls `/v1/email-change` and `/v1/email-change/verify` which return 404.
-- **Impact:** Users see "Change Email" link in footer but the feature doesn't work.
-- **Fix:** Implement `src/routes/email-change.ts` with verification code flow similar to signup/recover.
-
-### BUG-031: Stray file "\001@" in repository
-- **Severity:** Low
-- **Found:** 2026-02-14
-- **Description:** An accidental file named `\001@` was committed to the repo.
-- **Fix:** `git rm "\001@"` and commit.
-
-### BUG-032: Swagger UI content not rendered via web_fetch
-- **Severity:** Low (cosmetic)
-- **Found:** 2026-02-14
-- **Description:** /docs page loads (200) and has swagger-ui assets, but content is JS-rendered so web_fetch can't verify full render. Needs browser-based QA for full verification.
-
-## Fixed
-(none yet - this is first QA session)
diff --git a/decisions.md b/decisions.md
deleted file mode 100644
index a68912d..0000000
--- a/decisions.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# DocFast Decisions Log
-
-## 2026-02-14: Mandatory QA After Every Deployment
-
-**Rule:** Every deployment MUST be followed by a full QA session. No exceptions.
-
-**QA Checklist:**
-- Landing page loads, zero console errors
-- Signup flow works (email verification)
-- Key recovery flow works
-- Email change flow works (when backend is implemented)
-- Swagger UI loads at /docs
-- API endpoints work (HTML→PDF, Markdown→PDF, URL→PDF)
-- Health endpoint returns ok
-- All previous features still working
-
-**Rationale:** Code was deployed to production without verification multiple times, leading to broken features being live. QA catches regressions before users do.
-
-## 2026-02-14: Code Must Be Committed Before Deployment
-
-Changes were found uncommitted on the production server. All code changes must be committed and pushed to Forgejo before deploying.
diff --git a/dist/__tests__/api.test.js b/dist/__tests__/api.test.js
index b99fca3..93deda1 100644
--- a/dist/__tests__/api.test.js
+++ b/dist/__tests__/api.test.js
@@ -1,24 +1,20 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { app } from "../index.js";
-// Note: These tests require Puppeteer/Chrome to be available
-// For CI, use the Dockerfile which includes Chrome
const BASE = "http://localhost:3199";
let server;
beforeAll(async () => {
- process.env.API_KEYS = "test-key";
- process.env.PORT = "3199";
- // Import fresh to pick up env
server = app.listen(3199);
- // Wait for browser init
- await new Promise((r) => setTimeout(r, 2000));
+ await new Promise((r) => setTimeout(r, 200));
});
afterAll(async () => {
- server?.close();
+ await new Promise((resolve) => server?.close(() => resolve()));
});
describe("Auth", () => {
it("rejects requests without API key", async () => {
const res = await fetch(`${BASE}/v1/convert/html`, { method: "POST" });
expect(res.status).toBe(401);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
});
it("rejects invalid API key", async () => {
const res = await fetch(`${BASE}/v1/convert/html`, {
@@ -26,6 +22,8 @@ describe("Auth", () => {
headers: { Authorization: "Bearer wrong-key" },
});
expect(res.status).toBe(403);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
});
});
describe("Health", () => {
@@ -35,51 +33,243 @@ describe("Health", () => {
const data = await res.json();
expect(data.status).toBe("ok");
});
+ it("includes database field", async () => {
+ const res = await fetch(`${BASE}/health`);
+ expect(res.status).toBe(200);
+ const data = await res.json();
+ expect(data.database).toBeDefined();
+ expect(data.database.status).toBeDefined();
+ });
+ it("includes pool field with size, active, available", async () => {
+ const res = await fetch(`${BASE}/health`);
+ expect(res.status).toBe(200);
+ const data = await res.json();
+ expect(data.pool).toBeDefined();
+ expect(typeof data.pool.size).toBe("number");
+ expect(typeof data.pool.active).toBe("number");
+ expect(typeof data.pool.available).toBe("number");
+ });
+ it("includes version field", async () => {
+ const res = await fetch(`${BASE}/health`);
+ expect(res.status).toBe(200);
+ const data = await res.json();
+ expect(data.version).toBeDefined();
+ expect(typeof data.version).toBe("string");
+ });
});
describe("HTML to PDF", () => {
it("converts simple HTML", async () => {
const res = await fetch(`${BASE}/v1/convert/html`, {
method: "POST",
- headers: {
- Authorization: "Bearer test-key",
- "Content-Type": "application/json",
- },
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
            body: JSON.stringify({ html: "<h1>Test</h1>" }),
});
expect(res.status).toBe(200);
expect(res.headers.get("content-type")).toBe("application/pdf");
const buf = await res.arrayBuffer();
- expect(buf.byteLength).toBeGreaterThan(100);
- // PDF magic bytes
+ expect(buf.byteLength).toBeGreaterThan(10);
const header = new Uint8Array(buf.slice(0, 5));
expect(String.fromCharCode(...header)).toBe("%PDF-");
});
it("rejects missing html field", async () => {
const res = await fetch(`${BASE}/v1/convert/html`, {
method: "POST",
- headers: {
- Authorization: "Bearer test-key",
- "Content-Type": "application/json",
- },
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
body: JSON.stringify({}),
});
expect(res.status).toBe(400);
});
+ it("converts HTML with A3 format option", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+            body: JSON.stringify({ html: "<h1>A3 Test</h1>", options: { format: "A3" } }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ });
+ it("converts HTML with landscape option", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+            body: JSON.stringify({ html: "<h1>Landscape Test</h1>", options: { landscape: true } }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ });
+ it("converts HTML with margin options", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+            body: JSON.stringify({ html: "<h1>Margin Test</h1>", options: { margin: { top: "2cm" } } }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ });
+ it("rejects invalid JSON body", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: "invalid json{",
+ });
+ expect(res.status).toBe(400);
+ });
+ it("rejects wrong content-type header", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "text/plain" },
+            body: JSON.stringify({ html: "<h1>Test</h1>" }),
+ });
+ expect(res.status).toBe(415);
+ });
+ it("handles empty html string", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ html: "" }),
+ });
+ // Empty HTML should still generate a PDF (just blank) - but validation may reject it
+ expect([200, 400]).toContain(res.status);
+ });
});
describe("Markdown to PDF", () => {
it("converts markdown", async () => {
const res = await fetch(`${BASE}/v1/convert/markdown`, {
method: "POST",
- headers: {
- Authorization: "Bearer test-key",
- "Content-Type": "application/json",
- },
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
body: JSON.stringify({ markdown: "# Hello\n\nWorld" }),
});
expect(res.status).toBe(200);
expect(res.headers.get("content-type")).toBe("application/pdf");
});
});
+describe("URL to PDF", () => {
+ it("rejects missing url field", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("url");
+ });
+ it("blocks private IP addresses (SSRF protection)", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "http://127.0.0.1" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("private");
+ });
+ it("blocks localhost (SSRF protection)", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "http://localhost" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("private");
+ });
+ it("blocks 0.0.0.0 (SSRF protection)", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "http://0.0.0.0" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("private");
+ });
+ it("returns default filename in Content-Disposition for /convert/html", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ html: "<h1>hello</h1>" }),
+ });
+ expect(res.status).toBe(200);
+ const disposition = res.headers.get("content-disposition");
+ expect(disposition).toContain('filename="document.pdf"');
+ });
+ it("rejects invalid protocol (ftp)", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "ftp://example.com" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("http");
+ });
+ it("rejects invalid URL format", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "not-a-url" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toContain("Invalid");
+ });
+ it("converts valid URL to PDF", async () => {
+ const res = await fetch(`${BASE}/v1/convert/url`, {
+ method: "POST",
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
+ body: JSON.stringify({ url: "https://example.com" }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ const buf = await res.arrayBuffer();
+ expect(buf.byteLength).toBeGreaterThan(10);
+ const header = new Uint8Array(buf.slice(0, 5));
+ expect(String.fromCharCode(...header)).toBe("%PDF-");
+ });
+});
+describe("Demo Endpoints", () => {
+ it("demo/html converts HTML to PDF without auth", async () => {
+ const res = await fetch(`${BASE}/v1/demo/html`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ html: "<h1>Demo Test</h1>" }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ const buf = await res.arrayBuffer();
+ expect(buf.byteLength).toBeGreaterThan(10);
+ const header = new Uint8Array(buf.slice(0, 5));
+ expect(String.fromCharCode(...header)).toBe("%PDF-");
+ });
+ it("demo/markdown converts markdown to PDF without auth", async () => {
+ const res = await fetch(`${BASE}/v1/demo/markdown`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ markdown: "# Demo Markdown\n\nTest content" }),
+ });
+ expect(res.status).toBe(200);
+ expect(res.headers.get("content-type")).toBe("application/pdf");
+ });
+ it("demo rejects missing html field", async () => {
+ const res = await fetch(`${BASE}/v1/demo/html`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+ it("demo rejects wrong content-type", async () => {
+ const res = await fetch(`${BASE}/v1/demo/html`, {
+ method: "POST",
+ headers: { "Content-Type": "text/plain" },
+ body: "<h1>Test</h1>",
+ });
+ expect(res.status).toBe(415);
+ });
+});
describe("Templates", () => {
it("lists templates", async () => {
const res = await fetch(`${BASE}/v1/templates`, {
@@ -93,10 +283,7 @@ describe("Templates", () => {
it("renders invoice template", async () => {
const res = await fetch(`${BASE}/v1/templates/invoice/render`, {
method: "POST",
- headers: {
- Authorization: "Bearer test-key",
- "Content-Type": "application/json",
- },
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
body: JSON.stringify({
invoiceNumber: "TEST-001",
date: "2026-02-14",
@@ -111,12 +298,295 @@ describe("Templates", () => {
it("returns 404 for unknown template", async () => {
const res = await fetch(`${BASE}/v1/templates/nonexistent/render`, {
method: "POST",
- headers: {
- Authorization: "Bearer test-key",
- "Content-Type": "application/json",
- },
+ headers: { Authorization: "Bearer test-key", "Content-Type": "application/json" },
body: JSON.stringify({}),
});
expect(res.status).toBe(404);
});
});
+// === NEW TESTS: Task 3 ===
+describe("Signup endpoint (discontinued)", () => {
+ it("returns 410 Gone", async () => {
+ const res = await fetch(`${BASE}/v1/signup/free`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ email: "test@example.com" }),
+ });
+ expect(res.status).toBe(410);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+});
+describe("Recovery endpoint validation", () => {
+ it("rejects missing email", async () => {
+ const res = await fetch(`${BASE}/v1/recover`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+ it("rejects invalid email format", async () => {
+ const res = await fetch(`${BASE}/v1/recover`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ email: "not-an-email" }),
+ });
+ expect(res.status).toBe(400);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+ it("accepts valid email (always returns success)", async () => {
+ const res = await fetch(`${BASE}/v1/recover`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ email: "user@example.com" }),
+ });
+ expect(res.status).toBe(200);
+ const data = await res.json();
+ expect(data.status).toBe("recovery_sent");
+ });
+ it("verify rejects missing fields", async () => {
+ const res = await fetch(`${BASE}/v1/recover/verify`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ // May be 400 (validation) or 429 (rate limited from previous recover calls)
+ expect([400, 429]).toContain(res.status);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+});
+describe("CORS headers", () => {
+ it("sets Access-Control-Allow-Origin to * for API routes", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "OPTIONS",
+ });
+ expect(res.status).toBe(204);
+ expect(res.headers.get("access-control-allow-origin")).toBe("*");
+ });
+ it("restricts CORS for signup/billing/demo routes to docfast.dev", async () => {
+ const res = await fetch(`${BASE}/v1/demo/html`, {
+ method: "OPTIONS",
+ });
+ expect(res.status).toBe(204);
+ expect(res.headers.get("access-control-allow-origin")).toBe("https://docfast.dev");
+ });
+ it("includes correct allowed methods", async () => {
+ const res = await fetch(`${BASE}/health`, { method: "OPTIONS" });
+ const methods = res.headers.get("access-control-allow-methods");
+ expect(methods).toContain("GET");
+ expect(methods).toContain("POST");
+ });
+});
+describe("Error response format consistency", () => {
+ it("401 returns {error: string}", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, { method: "POST" });
+ expect(res.status).toBe(401);
+ const data = await res.json();
+ expect(typeof data.error).toBe("string");
+ });
+ it("403 returns {error: string}", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: { Authorization: "Bearer bad-key" },
+ });
+ expect(res.status).toBe(403);
+ const data = await res.json();
+ expect(typeof data.error).toBe("string");
+ });
+ it("404 API returns {error: string}", async () => {
+ const res = await fetch(`${BASE}/v1/nonexistent`);
+ expect(res.status).toBe(404);
+ const data = await res.json();
+ expect(typeof data.error).toBe("string");
+ });
+ it("410 returns {error: string}", async () => {
+ const res = await fetch(`${BASE}/v1/signup/free`, { method: "POST" });
+ expect(res.status).toBe(410);
+ const data = await res.json();
+ expect(typeof data.error).toBe("string");
+ });
+});
+describe("Rate limiting (global)", () => {
+ it("includes rate limit headers", async () => {
+ const res = await fetch(`${BASE}/health`);
+ // express-rate-limit with standardHeaders:true uses RateLimit-* headers
+ const limit = res.headers.get("ratelimit-limit");
+ expect(limit).toBeDefined();
+ });
+});
+describe("API root", () => {
+ it("returns API info", async () => {
+ const res = await fetch(`${BASE}/api`);
+ expect(res.status).toBe(200);
+ const data = await res.json();
+ expect(data.name).toBe("DocFast API");
+ expect(data.version).toBeDefined();
+ expect(data.endpoints).toBeInstanceOf(Array);
+ });
+});
+describe("JS minification", () => {
+ it("serves minified JS files in homepage HTML", async () => {
+ const res = await fetch(`${BASE}/`);
+ expect(res.status).toBe(200);
+ const html = await res.text();
+ // Check that HTML references app.js and status.js
+ expect(html).toContain('src="/app.js"');
+ // Fetch the JS file and verify it's minified (no excessive whitespace)
+ const jsRes = await fetch(`${BASE}/app.js`);
+ expect(jsRes.status).toBe(200);
+ const jsContent = await jsRes.text();
+ // Minified JS should not have excessive whitespace or comments
+ // Basic check: line count should be reasonable for minified code
+ const lineCount = jsContent.split('\n').length;
+ expect(lineCount).toBeLessThan(50); // Original has ~400+ lines, minified should be much less
+ // Should not contain developer comments (/* ... */)
+ expect(jsContent).not.toMatch(/\/\*[\s\S]*?\*\//);
+ });
+});
+describe("Usage endpoint", () => {
+ it("requires authentication (401 without key)", async () => {
+ const res = await fetch(`${BASE}/v1/usage`);
+ expect(res.status).toBe(401);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ expect(typeof data.error).toBe("string");
+ });
+ it("requires admin key (503 when not configured)", async () => {
+ const res = await fetch(`${BASE}/v1/usage`, {
+ headers: { Authorization: "Bearer test-key" },
+ });
+ expect(res.status).toBe(503);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ expect(data.error).toContain("Admin access not configured");
+ });
+ it("returns usage data with admin key", async () => {
+ // This test will likely fail since we don't have an admin key set in test environment
+ // But it documents the expected behavior
+ const res = await fetch(`${BASE}/v1/usage`, {
+ headers: { Authorization: "Bearer admin-key" },
+ });
+ // Could be 503 (admin access not configured) or 403 (admin access required)
+ expect([403, 503]).toContain(res.status);
+ });
+});
+describe("Billing checkout", () => {
+ it("has rate limiting headers", async () => {
+ const res = await fetch(`${BASE}/v1/billing/checkout`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ // Check rate limit headers are present (express-rate-limit should add these)
+ const limitHeader = res.headers.get("ratelimit-limit");
+ const remainingHeader = res.headers.get("ratelimit-remaining");
+ const resetHeader = res.headers.get("ratelimit-reset");
+ expect(limitHeader).toBeDefined();
+ expect(remainingHeader).toBeDefined();
+ expect(resetHeader).toBeDefined();
+ });
+ it("fails when Stripe not configured", async () => {
+ const res = await fetch(`${BASE}/v1/billing/checkout`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({}),
+ });
+ // Returns 500 due to missing STRIPE_SECRET_KEY in test environment
+ expect(res.status).toBe(500);
+ const data = await res.json();
+ expect(data.error).toBeDefined();
+ });
+});
+describe("Rate limit headers on PDF endpoints", () => {
+ it("includes rate limit headers on HTML conversion", async () => {
+ const res = await fetch(`${BASE}/v1/convert/html`, {
+ method: "POST",
+ headers: {
+ Authorization: "Bearer test-key",
+ "Content-Type": "application/json"
+ },
+ body: JSON.stringify({ html: "<h1>Test</h1>" }),
+ });
+ expect(res.status).toBe(200);
+ // Check for rate limit headers
+ const limitHeader = res.headers.get("ratelimit-limit");
+ const remainingHeader = res.headers.get("ratelimit-remaining");
+ const resetHeader = res.headers.get("ratelimit-reset");
+ expect(limitHeader).toBeDefined();
+ expect(remainingHeader).toBeDefined();
+ expect(resetHeader).toBeDefined();
+ });
+ it("includes rate limit headers on demo endpoint", async () => {
+ const res = await fetch(`${BASE}/v1/demo/html`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ html: "<h1>Demo Test</h1>" }),
+ });
+ expect(res.status).toBe(200);
+ // Check for rate limit headers
+ const limitHeader = res.headers.get("ratelimit-limit");
+ const remainingHeader = res.headers.get("ratelimit-remaining");
+ const resetHeader = res.headers.get("ratelimit-reset");
+ expect(limitHeader).toBeDefined();
+ expect(remainingHeader).toBeDefined();
+ expect(resetHeader).toBeDefined();
+ });
+});
+describe("OpenAPI spec", () => {
+ it("returns a valid OpenAPI 3.0 spec with paths", async () => {
+ const res = await fetch(`${BASE}/openapi.json`);
+ expect(res.status).toBe(200);
+ const spec = await res.json();
+ expect(spec.openapi).toBe("3.0.3");
+ expect(spec.info).toBeDefined();
+ expect(spec.info.title).toBe("DocFast API");
+ expect(Object.keys(spec.paths).length).toBeGreaterThanOrEqual(8);
+ });
+ it("includes all major endpoint groups", async () => {
+ const res = await fetch(`${BASE}/openapi.json`);
+ const spec = await res.json();
+ const paths = Object.keys(spec.paths);
+ expect(paths).toContain("/v1/convert/html");
+ expect(paths).toContain("/v1/convert/markdown");
+ expect(paths).toContain("/health");
+ });
+ it("PdfOptions schema includes all valid format values and waitUntil field", async () => {
+ const res = await fetch(`${BASE}/openapi.json`);
+ const spec = await res.json();
+ const pdfOptions = spec.components.schemas.PdfOptions;
+ expect(pdfOptions).toBeDefined();
+ // Check that all 11 format values are included
+ const expectedFormats = ["Letter", "Legal", "Tabloid", "Ledger", "A0", "A1", "A2", "A3", "A4", "A5", "A6"];
+ expect(pdfOptions.properties.format.enum).toEqual(expectedFormats);
+ // Check that waitUntil field exists with correct enum values
+ expect(pdfOptions.properties.waitUntil).toBeDefined();
+ expect(pdfOptions.properties.waitUntil.enum).toEqual(["load", "domcontentloaded", "networkidle0", "networkidle2"]);
+ // Check that headerTemplate and footerTemplate descriptions mention 100KB limit
+ expect(pdfOptions.properties.headerTemplate.description).toContain("100KB");
+ expect(pdfOptions.properties.footerTemplate.description).toContain("100KB");
+ });
+});
+describe("404 handler", () => {
+ it("returns proper JSON error format for API routes", async () => {
+ const res = await fetch(`${BASE}/v1/nonexistent-endpoint`);
+ expect(res.status).toBe(404);
+ const data = await res.json();
+ expect(typeof data.error).toBe("string");
+ expect(data.error).toContain("Not Found");
+ expect(data.error).toContain("GET");
+ expect(data.error).toContain("/v1/nonexistent-endpoint");
+ });
+ it("returns HTML 404 for non-API routes", async () => {
+ const res = await fetch(`${BASE}/nonexistent-page`);
+ expect(res.status).toBe(404);
+ const html = await res.text();
+ expect(html).toContain("<!DOCTYPE html>");
+ expect(html).toContain("404");
+ expect(html).toContain("Page Not Found");
+ });
+});
diff --git a/dist/index.js b/dist/index.js
index aa69275..cf924ba 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -1,6 +1,7 @@
import express from "express";
import { randomUUID } from "crypto";
-import compression from "compression";
+import "./types.js"; // Augments Express.Request with requestId, acquirePdfSlot, releasePdfSlot
+import { compressionMiddleware } from "./middleware/compression.js";
import logger from "./services/logger.js";
import helmet from "helmet";
import path from "path";
@@ -9,17 +10,19 @@ import rateLimit from "express-rate-limit";
import { convertRouter } from "./routes/convert.js";
import { templatesRouter } from "./routes/templates.js";
import { healthRouter } from "./routes/health.js";
-import { signupRouter } from "./routes/signup.js";
+import { demoRouter } from "./routes/demo.js";
import { recoverRouter } from "./routes/recover.js";
+import { emailChangeRouter } from "./routes/email-change.js";
import { billingRouter } from "./routes/billing.js";
import { authMiddleware } from "./middleware/auth.js";
-import { usageMiddleware, loadUsageData } from "./middleware/usage.js";
-import { getUsageStats } from "./middleware/usage.js";
-import { pdfRateLimitMiddleware, getConcurrencyStats } from "./middleware/pdfRateLimit.js";
+import { usageMiddleware, loadUsageData, flushDirtyEntries } from "./middleware/usage.js";
+import { pdfRateLimitMiddleware } from "./middleware/pdfRateLimit.js";
+import { adminRouter } from "./routes/admin.js";
import { initBrowser, closeBrowser } from "./services/browser.js";
import { loadKeys, getAllKeys } from "./services/keys.js";
-import { verifyToken, loadVerifications } from "./services/verification.js";
-import { initDatabase, pool } from "./services/db.js";
+import { pagesRouter } from "./routes/pages.js";
+import { initDatabase, pool, cleanupStaleData } from "./services/db.js";
+import { startPeriodicCleanup, stopPeriodicCleanup } from "./utils/periodic-cleanup.js";
const app = express();
const PORT = parseInt(process.env.PORT || "3100", 10);
app.use(helmet({ crossOriginResourcePolicy: { policy: "cross-origin" } }));
@@ -43,14 +46,31 @@ app.use((_req, res, next) => {
next();
});
// Compression
-app.use(compression());
+app.use(compressionMiddleware);
+// Block search engine indexing on staging
+app.use((req, res, next) => {
+ if (req.hostname.includes("staging")) {
+ res.setHeader("X-Robots-Tag", "noindex, nofollow");
+ }
+ next();
+});
// Differentiated CORS middleware
+const ALLOWED_ORIGINS = new Set(["https://docfast.dev", "https://staging.docfast.dev"]);
app.use((req, res, next) => {
const isAuthBillingRoute = req.path.startsWith('/v1/signup') ||
req.path.startsWith('/v1/recover') ||
- req.path.startsWith('/v1/billing');
+ req.path.startsWith('/v1/billing') ||
+ req.path.startsWith('/v1/demo') ||
+ req.path.startsWith('/v1/email-change');
if (isAuthBillingRoute) {
- res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev");
+ const origin = req.headers.origin;
+ if (origin && ALLOWED_ORIGINS.has(origin)) {
+ res.setHeader("Access-Control-Allow-Origin", origin);
+ res.setHeader("Vary", "Origin");
+ }
+ else {
+ res.setHeader("Access-Control-Allow-Origin", "https://docfast.dev");
+ }
}
else {
res.setHeader("Access-Control-Allow-Origin", "*");
@@ -66,7 +86,8 @@ app.use((req, res, next) => {
});
// Raw body for Stripe webhook signature verification
app.use("/v1/billing/webhook", express.raw({ type: "application/json" }));
-app.use(express.json({ limit: "2mb" }));
+// NOTE: No global express.json() here — route-specific parsers are applied
+// per-route below to enforce correct body size limits (BUG-101 fix).
app.use(express.text({ limit: "2mb", type: "text/*" }));
// Trust nginx proxy
app.set("trust proxy", 1);
@@ -80,106 +101,54 @@ const limiter = rateLimit({
app.use(limiter);
// Public routes
app.use("/health", healthRouter);
-app.use("/v1/signup", signupRouter);
-app.use("/v1/recover", recoverRouter);
-app.use("/v1/billing", billingRouter);
+app.use("/v1/demo", express.json({ limit: "50kb" }), pdfRateLimitMiddleware, demoRouter);
+/**
+ * @openapi
+ * /v1/signup/free:
+ * post:
+ * tags: [Account]
+ * deprecated: true
+ * summary: Request a free API key (discontinued)
+ * description: Free accounts have been discontinued. Use the demo endpoints or upgrade to Pro.
+ * responses:
+ * 410:
+ * description: Feature discontinued
+ * content:
+ * application/json:
+ * schema:
+ * type: object
+ * properties:
+ * error:
+ * type: string
+ * demo_endpoint:
+ * type: string
+ * pro_url:
+ * type: string
+ */
+app.use("/v1/signup", (_req, res) => {
+ res.status(410).json({
+ error: "Free accounts have been discontinued. Try our demo at POST /v1/demo/html or upgrade to Pro at https://docfast.dev",
+ demo_endpoint: "/v1/demo/html",
+ pro_url: "https://docfast.dev/#pricing"
+ });
+});
+// Default 2MB JSON parser for standard routes
+const defaultJsonParser = express.json({ limit: "2mb" });
+app.use("/v1/recover", defaultJsonParser, recoverRouter);
+app.use("/v1/email-change", defaultJsonParser, emailChangeRouter);
+app.use("/v1/billing", defaultJsonParser, billingRouter);
// Authenticated routes — conversion routes get tighter body limits (500KB)
const convertBodyLimit = express.json({ limit: "500kb" });
app.use("/v1/convert", convertBodyLimit, authMiddleware, usageMiddleware, pdfRateLimitMiddleware, convertRouter);
-app.use("/v1/templates", authMiddleware, usageMiddleware, templatesRouter);
-// Admin: usage stats (admin key required)
-const adminAuth = (req, res, next) => {
- const adminKey = process.env.ADMIN_API_KEY;
- if (!adminKey) {
- res.status(503).json({ error: "Admin access not configured" });
- return;
- }
- if (req.apiKeyInfo?.key !== adminKey) {
- res.status(403).json({ error: "Admin access required" });
- return;
- }
- next();
-};
-app.get("/v1/usage", authMiddleware, adminAuth, (req, res) => {
- res.json(getUsageStats(req.apiKeyInfo?.key));
-});
-// Admin: concurrency stats (admin key required)
-app.get("/v1/concurrency", authMiddleware, adminAuth, (_req, res) => {
- res.json(getConcurrencyStats());
-});
-// Email verification endpoint
-app.get("/verify", (req, res) => {
- const token = req.query.token;
- if (!token) {
- res.status(400).send(verifyPage("Invalid Link", "No verification token provided.", null));
- return;
- }
- const result = verifyToken(token);
- switch (result.status) {
- case "ok":
- res.send(verifyPage("Email Verified! 🚀", "Your DocFast API key is ready:", result.verification.apiKey));
- break;
- case "already_verified":
- res.send(verifyPage("Already Verified", "This email was already verified. Here's your API key:", result.verification.apiKey));
- break;
- case "expired":
- res.status(410).send(verifyPage("Link Expired", "This verification link has expired (24h). Please sign up again.", null));
- break;
- case "invalid":
- res.status(404).send(verifyPage("Invalid Link", "This verification link is not valid.", null));
- break;
- }
-});
-function verifyPage(title, message, apiKey) {
- return `
-
-${title} — DocFast
-
-
-
-
-
${title}
-
${message}
-${apiKey ? `
-
⚠️ Save your API key securely. You can recover it via email if needed.
-
${apiKey}
-
-` : `
`}
-
`;
-}
-// Landing page
+app.use("/v1/templates", defaultJsonParser, authMiddleware, usageMiddleware, templatesRouter);
+// Admin + usage routes (extracted to routes/admin.ts)
+app.use(adminRouter);
+// Pages, favicon, docs, openapi.json, /api (extracted to routes/pages.ts)
const __dirname = path.dirname(fileURLToPath(import.meta.url));
-// Favicon route
-app.get("/favicon.ico", (_req, res) => {
- res.setHeader('Content-Type', 'image/svg+xml');
- res.setHeader('Cache-Control', 'public, max-age=604800');
- res.sendFile(path.join(__dirname, "../public/favicon.svg"));
-});
-// Docs page (clean URL)
-app.get("/docs", (_req, res) => {
- // Swagger UI 5.x uses new Function() (via ajv) for JSON schema validation.
- // Override helmet's default CSP to allow 'unsafe-eval' + blob: for Swagger UI.
- res.setHeader("Content-Security-Policy", "default-src 'self';script-src 'self' 'unsafe-eval';style-src 'self' https: 'unsafe-inline';img-src 'self' data: blob:;font-src 'self' https: data:;connect-src 'self';worker-src 'self' blob:;base-uri 'self';form-action 'self';frame-ancestors 'self';object-src 'none'");
- res.setHeader('Cache-Control', 'public, max-age=86400');
- res.sendFile(path.join(__dirname, "../public/docs.html"));
-});
+app.use(pagesRouter);
// Static asset cache headers middleware
app.use((req, res, next) => {
if (/\.(css|js|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$/.test(req.path)) {
- console.log("CACHE HIT:", req.path);
res.setHeader('Cache-Control', 'public, max-age=604800, immutable');
}
next();
@@ -188,39 +157,6 @@ app.use(express.static(path.join(__dirname, "../public"), {
etag: true,
cacheControl: false,
}));
-// Legal pages (clean URLs)
-app.get("/impressum", (_req, res) => {
- res.setHeader('Cache-Control', 'public, max-age=86400');
- res.sendFile(path.join(__dirname, "../public/impressum.html"));
-});
-app.get("/privacy", (_req, res) => {
- res.setHeader('Cache-Control', 'public, max-age=86400');
- res.sendFile(path.join(__dirname, "../public/privacy.html"));
-});
-app.get("/terms", (_req, res) => {
- res.setHeader('Cache-Control', 'public, max-age=86400');
- res.sendFile(path.join(__dirname, "../public/terms.html"));
-});
-app.get("/status", (_req, res) => {
- res.setHeader("Cache-Control", "public, max-age=60");
- res.sendFile(path.join(__dirname, "../public/status.html"));
-});
-// API root
-app.get("/api", (_req, res) => {
- res.json({
- name: "DocFast API",
- version: "0.2.1",
- endpoints: [
- "POST /v1/signup/free — Get a free API key",
- "POST /v1/convert/html",
- "POST /v1/convert/markdown",
- "POST /v1/convert/url",
- "POST /v1/templates/:id/render",
- "GET /v1/templates",
- "POST /v1/billing/checkout — Start Pro subscription",
- ],
- });
-});
// 404 handler - must be after all routes
app.use((req, res) => {
// Check if it's an API request
@@ -263,22 +199,57 @@ app.use((req, res) => {