Clear all blockers: payment tested, CI/CD secrets added, status launch-ready

Hoid 2026-02-16 18:49:39 +00:00
parent 33b1489e6c
commit 0ab4afd398
94 changed files with 10014 additions and 931 deletions

View file

@@ -0,0 +1,162 @@
#!/bin/bash
# DocFast BorgBackup Script - Full Disaster Recovery
# Backs up: PostgreSQL, Docker volumes, nginx config, SSL certs, crontabs, OpenDKIM keys
# Schedule: daily at 03:00 UTC, keeps 7 daily + 4 weekly + 3 monthly
set -euo pipefail
# Configuration
BORG_REPO="/opt/borg-backups/docfast"
BACKUP_NAME="docfast-$(date +%Y-%m-%d_%H%M)"
TEMP_DIR="/tmp/docfast-backup-$$"
LOG_FILE="/var/log/docfast-backup.log"
# Database configuration
DB_NAME="docfast"
DB_USER="docfast"
DB_HOST="localhost"
DB_PORT="5432"
# Logging function
log() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE"
}
# Error handler
error_exit() {
log "ERROR: $1"
cleanup
exit 1
}
# Cleanup function
cleanup() {
if [[ -d "$TEMP_DIR" ]]; then
rm -rf "$TEMP_DIR"
fi
}
# Trap cleanup on exit
trap cleanup EXIT
log "Starting DocFast backup: $BACKUP_NAME"
# Create temporary directory
mkdir -p "$TEMP_DIR"
mkdir -p "$(dirname "$LOG_FILE")"
# 1. PostgreSQL dump
log "Creating PostgreSQL dump..."
export PGPASSFILE="/root/.pgpass"
pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" \
--no-password --verbose --clean --if-exists --format=custom \
> "$TEMP_DIR/docfast-db.dump" 2>>"$LOG_FILE" || error_exit "PostgreSQL dump failed"
# Verify dump is valid
if ! pg_restore --list "$TEMP_DIR/docfast-db.dump" >/dev/null 2>&1; then
error_exit "PostgreSQL dump verification failed"
fi
log "PostgreSQL dump completed: $(stat -c%s "$TEMP_DIR/docfast-db.dump") bytes"
# 2. Docker volumes
log "Backing up Docker volumes..."
mkdir -p "$TEMP_DIR/docker-volumes"
if [[ -d "/var/lib/docker/volumes" ]]; then
cp -r /var/lib/docker/volumes/* "$TEMP_DIR/docker-volumes/" || error_exit "Docker volumes backup failed"
log "Docker volumes backed up"
else
log "WARNING: No Docker volumes found"
fi
# 3. Nginx configuration
log "Backing up nginx configuration..."
mkdir -p "$TEMP_DIR/nginx"
cp -r /etc/nginx/* "$TEMP_DIR/nginx/" || error_exit "Nginx backup failed"
log "Nginx configuration backed up"
# 4. SSL certificates
log "Backing up SSL certificates..."
mkdir -p "$TEMP_DIR/letsencrypt"
cp -r /etc/letsencrypt/* "$TEMP_DIR/letsencrypt/" || error_exit "SSL certificates backup failed"
log "SSL certificates backed up"
# 5. Crontabs
log "Backing up crontabs..."
mkdir -p "$TEMP_DIR/crontabs"
if [[ -d "/var/spool/cron/crontabs" ]]; then
cp -r /var/spool/cron/crontabs/* "$TEMP_DIR/crontabs/" 2>/dev/null || log "No crontabs found"
fi
# Also backup user crontabs
crontab -l > "$TEMP_DIR/crontabs/root-crontab.txt" 2>/dev/null || echo "# No root crontab" > "$TEMP_DIR/crontabs/root-crontab.txt"
log "Crontabs backed up"
# 6. OpenDKIM keys
log "Backing up OpenDKIM keys..."
mkdir -p "$TEMP_DIR/opendkim"
cp -r /etc/opendkim/* "$TEMP_DIR/opendkim/" || error_exit "OpenDKIM backup failed"
log "OpenDKIM keys backed up"
# 7. DocFast application files (docker-compose, env, scripts)
log "Backing up DocFast application files..."
mkdir -p "$TEMP_DIR/docfast-app"
if [[ -d "/opt/docfast" ]]; then
cp /opt/docfast/docker-compose.yml "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp /opt/docfast/.env "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp -r /opt/docfast/scripts "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp -r /opt/docfast/deploy "$TEMP_DIR/docfast-app/" 2>/dev/null || true
log "DocFast application files backed up"
fi
# 8. System information
log "Creating system information backup..."
mkdir -p "$TEMP_DIR/system"
systemctl list-unit-files --state=enabled > "$TEMP_DIR/system/enabled-services.txt"
dpkg -l > "$TEMP_DIR/system/installed-packages.txt"
uname -a > "$TEMP_DIR/system/system-info.txt"
df -h > "$TEMP_DIR/system/disk-usage.txt"
log "System information backed up"
# 9. Create Borg backup
log "Creating Borg backup..."
# NOTE: the year-based passphrase stops matching after Dec 31; it must equal
# the passphrase used at `borg init` or the repository becomes unreadable.
export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
export BORG_RELOCATED_REPO_ACCESS_IS_OK=yes
# Initialize repository if it doesn't exist
if [[ ! -d "$BORG_REPO" ]]; then
log "Initializing new Borg repository..."
borg init --encryption=repokey "$BORG_REPO" || error_exit "Failed to initialize Borg repository"
fi
# Create backup
log "Creating Borg archive: $BACKUP_NAME"
borg create \
--verbose \
--filter AME \
--list \
--stats \
--show-rc \
--compression lz4 \
--exclude-caches \
"$BORG_REPO::$BACKUP_NAME" \
"$TEMP_DIR" 2>>"$LOG_FILE" || error_exit "Borg backup creation failed"
# 10. Prune old backups (7 daily, 4 weekly, 3 monthly)
log "Pruning old backups..."
borg prune \
--list \
--prefix 'docfast-' \
--show-rc \
--keep-daily 7 \
--keep-weekly 4 \
--keep-monthly 3 \
"$BORG_REPO" 2>>"$LOG_FILE" || error_exit "Borg pruning failed"
# 11. Compact repository
log "Compacting repository..."
borg compact "$BORG_REPO" 2>>"$LOG_FILE" || log "WARNING: Repository compaction failed (non-fatal)"
# 12. Repository info
log "Backup completed successfully!"
borg info "$BORG_REPO" 2>>"$LOG_FILE"
log "DocFast backup completed: $BACKUP_NAME"

View file

@@ -0,0 +1,90 @@
#!/bin/bash
# DocFast Off-site BorgBackup to Hetzner Storage Box
# Runs AFTER local backup completes (cron: 03:30 UTC)
# Same data & retention as local: 7 daily + 4 weekly + 3 monthly
set -uo pipefail # no -e: per-step failures are handled inline so one miss does not abort the run
REMOTE_REPO="ssh://u149513-sub11@u149513-sub11.your-backup.de:23/./docfast-1"
BACKUP_NAME="docfast-$(date +%Y-%m-%d_%H%M)"
LOG_FILE="/var/log/docfast-backup.log"
log() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - [OFFSITE] $1" | tee -a "$LOG_FILE"
}
export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
export BORG_RELOCATED_REPO_ACCESS_IS_OK=yes
export BORG_RSH="ssh -o StrictHostKeyChecking=no" # skips host-key verification; pinning the key in known_hosts would be safer
# Prepare data (same as local backup script)
TEMP_DIR="/tmp/docfast-backup-offsite-$$"
mkdir -p "$TEMP_DIR"
trap "rm -rf $TEMP_DIR" EXIT
# PostgreSQL dump
log "Dumping PostgreSQL..."
export PGPASSFILE="/root/.pgpass"
pg_dump -h localhost -p 5432 -U docfast -d docfast \
--no-password --clean --if-exists --format=custom \
> "$TEMP_DIR/docfast-db.dump" 2>>"$LOG_FILE" || log "WARNING: PostgreSQL dump failed"
# Docker volumes
mkdir -p "$TEMP_DIR/docker-volumes"
cp -r /var/lib/docker/volumes/* "$TEMP_DIR/docker-volumes/" 2>/dev/null || true
# Nginx
mkdir -p "$TEMP_DIR/nginx"
cp -r /etc/nginx/* "$TEMP_DIR/nginx/" 2>/dev/null || true
# SSL
mkdir -p "$TEMP_DIR/letsencrypt"
cp -r /etc/letsencrypt/* "$TEMP_DIR/letsencrypt/" 2>/dev/null || true
# Crontabs
mkdir -p "$TEMP_DIR/crontabs"
cp -r /var/spool/cron/crontabs/* "$TEMP_DIR/crontabs/" 2>/dev/null || true
crontab -l > "$TEMP_DIR/crontabs/root-crontab.txt" 2>/dev/null || true
# OpenDKIM
mkdir -p "$TEMP_DIR/opendkim"
cp -r /etc/opendkim/* "$TEMP_DIR/opendkim/" 2>/dev/null || true
# App files
mkdir -p "$TEMP_DIR/docfast-app"
cp /opt/docfast/docker-compose.yml "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp /opt/docfast/.env "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp -r /opt/docfast/scripts "$TEMP_DIR/docfast-app/" 2>/dev/null || true
cp -r /opt/docfast/deploy "$TEMP_DIR/docfast-app/" 2>/dev/null || true
# System info
mkdir -p "$TEMP_DIR/system"
systemctl list-unit-files --state=enabled > "$TEMP_DIR/system/enabled-services.txt" 2>/dev/null || true
dpkg -l > "$TEMP_DIR/system/installed-packages.txt" 2>/dev/null || true
log "Starting off-site backup: $BACKUP_NAME"
# Create remote backup
borg create \
--stats \
--compression lz4 \
--exclude-caches \
"$REMOTE_REPO::$BACKUP_NAME" \
"$TEMP_DIR" 2>>"$LOG_FILE" || { log "ERROR: Off-site backup creation failed"; exit 1; }
log "Off-site backup created, pruning..."
# Prune (same retention as local)
borg prune \
--list \
--prefix 'docfast-' \
--keep-daily 7 \
--keep-weekly 4 \
--keep-monthly 3 \
"$REMOTE_REPO" 2>>"$LOG_FILE" || log "WARNING: Off-site prune failed"
# Compact
borg compact "$REMOTE_REPO" 2>>"$LOG_FILE" || true
log "Off-site backup completed: $BACKUP_NAME"
borg info "$REMOTE_REPO" 2>>"$LOG_FILE"
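
A sketch of the matching 03:30 UTC cron slot and of inspecting the remote repository by hand; the script path is an assumption:

# /etc/cron.d/docfast-borg-offsite -- runs after the local backup (script path assumed)
30 3 * * * root /opt/docfast/scripts/borg-backup-offsite.sh >/dev/null 2>&1

# Manual check of the off-site repo, using the same environment as the script:
# export BORG_PASSPHRASE=...; export BORG_RSH="ssh -o StrictHostKeyChecking=no"
# borg list "ssh://u149513-sub11@u149513-sub11.your-backup.de:23/./docfast-1"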

View file

@@ -0,0 +1,150 @@
#!/bin/bash
# DocFast BorgBackup Restore Script
# Restores from Borg backup for disaster recovery
set -euo pipefail
# Configuration
BORG_REPO="/opt/borg-backups/docfast"
RESTORE_DIR="/tmp/docfast-restore-$$"
LOG_FILE="/var/log/docfast-restore.log"
# Usage function
usage() {
echo "Usage: $0 [list|restore] [archive-name]"
echo " list - List available archives"
echo " restore <archive-name> - Restore specific archive"
echo " restore latest - Restore latest archive"
echo ""
echo "Examples:"
echo " $0 list"
echo " $0 restore docfast-2026-02-15_0300"
echo " $0 restore latest"
exit 1
}
# Logging function
log() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE"
}
# Error handler
error_exit() {
log "ERROR: $1"
cleanup
exit 1
}
# Cleanup function
cleanup() {
if [[ -d "$RESTORE_DIR" ]]; then
log "Cleaning up temporary directory: $RESTORE_DIR"
rm -rf "$RESTORE_DIR"
fi
}
# Trap cleanup on exit
trap cleanup EXIT
# Check if repository exists
if [[ ! -d "$BORG_REPO" ]]; then
error_exit "Borg repository not found: $BORG_REPO"
fi
# Set up environment
# NOTE: must match the passphrase in effect when the repo was initialized;
# the year-based scheme breaks for archives restored after a year rollover.
export BORG_PASSPHRASE="docfast-backup-$(date +%Y)"
export BORG_RELOCATED_REPO_ACCESS_IS_OK=yes
mkdir -p "$(dirname "$LOG_FILE")"
# Parse command line
case "${1:-}" in
"list")
log "Listing available archives..."
borg list "$BORG_REPO"
exit 0
;;
"restore")
ARCHIVE_NAME="${2:-}"
if [[ -z "$ARCHIVE_NAME" ]]; then
usage
fi
if [[ "$ARCHIVE_NAME" == "latest" ]]; then
log "Finding latest archive..."
ARCHIVE_NAME=$(borg list --short "$BORG_REPO" | grep "^docfast-" | tail -1)
if [[ -z "$ARCHIVE_NAME" ]]; then
error_exit "No archives found in repository"
fi
log "Latest archive found: $ARCHIVE_NAME"
fi
;;
*)
usage
;;
esac
log "Starting restore of archive: $ARCHIVE_NAME"
# Verify archive exists
if ! borg list "$BORG_REPO::$ARCHIVE_NAME" >/dev/null 2>&1; then
error_exit "Archive not found: $ARCHIVE_NAME"
fi
# Create restore directory
mkdir -p "$RESTORE_DIR"
log "Restoring to temporary directory: $RESTORE_DIR"
# Extract archive
log "Extracting archive..."
cd "$RESTORE_DIR"
borg extract --verbose --list "$BORG_REPO::$ARCHIVE_NAME"
log "Archive extracted successfully. Restore data available at: $RESTORE_DIR"
echo ""
echo "RESTORE LOCATIONS:"
echo "=================="
echo "PostgreSQL dump: $RESTORE_DIR/tmp/docfast-backup-*/docfast-db.dump"
echo "Docker volumes: $RESTORE_DIR/tmp/docfast-backup-*/docker-volumes/"
echo "Nginx config: $RESTORE_DIR/tmp/docfast-backup-*/nginx/"
echo "SSL certificates: $RESTORE_DIR/tmp/docfast-backup-*/letsencrypt/"
echo "Crontabs: $RESTORE_DIR/tmp/docfast-backup-*/crontabs/"
echo "OpenDKIM keys: $RESTORE_DIR/tmp/docfast-backup-*/opendkim/"
echo "DocFast app files: $RESTORE_DIR/tmp/docfast-backup-*/docfast-app/"
echo "System info: $RESTORE_DIR/tmp/docfast-backup-*/system/"
echo ""
echo "MANUAL RESTORE STEPS:"
echo "====================="
echo "1. Stop DocFast service:"
echo " systemctl stop docker"
echo ""
echo "2. Restore PostgreSQL database:"
echo " sudo -u postgres dropdb docfast"
echo " sudo -u postgres createdb -O docfast docfast"
echo " sudo -u postgres pg_restore -d docfast $RESTORE_DIR/tmp/docfast-backup-*/docfast-db.dump"
echo ""
echo "3. Restore Docker volumes:"
echo " cp -r $RESTORE_DIR/tmp/docfast-backup-*/docker-volumes/* /var/lib/docker/volumes/"
echo ""
echo "4. Restore configuration files:"
echo " cp -r $RESTORE_DIR/tmp/docfast-backup-*/nginx/* /etc/nginx/"
echo " cp -r $RESTORE_DIR/tmp/docfast-backup-*/letsencrypt/* /etc/letsencrypt/"
echo " cp -r $RESTORE_DIR/tmp/docfast-backup-*/opendkim/* /etc/opendkim/"
echo " cp -r $RESTORE_DIR/tmp/docfast-backup-*/docfast-app/* /opt/docfast/"
echo ""
echo "5. Restore crontabs:"
echo " cp $RESTORE_DIR/tmp/docfast-backup-*/crontabs/root /var/spool/cron/crontabs/root"
echo " chmod 600 /var/spool/cron/crontabs/root"
echo ""
echo "6. Set correct permissions:"
echo " chown -R opendkim:opendkim /etc/opendkim/keys"
echo " chown -R postgres:postgres /var/lib/postgresql"
echo ""
echo "7. Start services:"
echo " systemctl start postgresql"
echo " systemctl start docker"
echo " cd /opt/docfast && docker-compose up -d"
echo ""
echo "WARNING: This script does NOT automatically restore files to prevent"
echo "accidental overwrites. Follow the manual steps above carefully."
log "Restore extraction completed. Follow manual steps to complete restoration."

View file

@@ -0,0 +1,70 @@
#!/usr/bin/env node
/**
* Build-time HTML templating system for DocFast.
* No dependencies; uses only Node.js built-ins.
*
* - Reads page sources from templates/pages/*.html
* - Reads partials from templates/partials/*.html
* - Replaces {{> partial_name}} with partial content
* - Supports {{title}} variable (set via <!-- title: Page Title --> comment at top)
* - Writes output to public/
*/
import { readFileSync, writeFileSync, readdirSync, mkdirSync } from 'node:fs';
import { join, basename } from 'node:path';
import { fileURLToPath } from 'node:url';
const __dirname = fileURLToPath(new URL('.', import.meta.url));
const ROOT = join(__dirname, '..');
const PAGES_DIR = join(ROOT, 'templates', 'pages');
const PARTIALS_DIR = join(ROOT, 'templates', 'partials');
const OUTPUT_DIR = join(ROOT, 'public');
// Ensure the output directory exists before writing pages
mkdirSync(OUTPUT_DIR, { recursive: true });
// Load all partials
const partials = {};
for (const file of readdirSync(PARTIALS_DIR)) {
if (!file.endsWith('.html')) continue;
const name = file.replace('.html', '');
partials[name] = readFileSync(join(PARTIALS_DIR, file), 'utf-8');
}
console.log(`Loaded ${Object.keys(partials).length} partials: ${Object.keys(partials).join(', ')}`);
// Process each page
const pages = readdirSync(PAGES_DIR).filter(f => f.endsWith('.html'));
console.log(`Processing ${pages.length} pages...`);
for (const file of pages) {
let content = readFileSync(join(PAGES_DIR, file), 'utf-8');
// Extract title from <!-- title: ... --> comment
let title = '';
const titleMatch = content.match(/^<!--\s*title:\s*(.+?)\s*-->/);
if (titleMatch) {
title = titleMatch[1];
// Remove the title comment from output
content = content.replace(/^<!--\s*title:.+?-->\n?/, '');
}
// Replace {{> partial_name}} with partial content (support nested partials)
let maxDepth = 5;
while (maxDepth-- > 0 && content.includes('{{>')) {
content = content.replace(/\{\{>\s*([a-zA-Z0-9_-]+)\s*\}\}/g, (match, name) => {
if (!(name in partials)) {
console.warn(` Warning: partial "${name}" not found in ${file}`);
return match;
}
return partials[name];
});
}
// Replace {{title}} variable
content = content.replace(/\{\{title\}\}/g, title);
// Write output
const outPath = join(OUTPUT_DIR, file);
writeFileSync(outPath, content);
console.log(`${file} (${(content.length / 1024).toFixed(1)}KB)`);
}
console.log('Done!');
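
A minimal end-to-end sketch of the templating flow; the build script location (scripts/build.mjs) and the partial name are assumptions:

mkdir -p templates/pages templates/partials
printf '<header>DocFast</header>\n' > templates/partials/header.html
cat > templates/pages/about.html <<'EOF'
<!-- title: About DocFast -->
{{> header}}
<h1>{{title}}</h1>
EOF
node scripts/build.mjs   # writes public/about.html with the partial and title inlined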

View file

@@ -0,0 +1,49 @@
#!/bin/bash
# DocFast SQLite Backup Script
# Runs every 6 hours via cron. Keeps 7 days of 6-hourly backups + 4 weekly.
set -euo pipefail
BACKUP_DIR="/opt/docfast-backups"
DB_PATH="/var/lib/docker/volumes/docfast_docfast-data/_data/docfast.db"
DATE=$(date +%Y-%m-%d_%H%M)
DAY_OF_WEEK=$(date +%u) # 1=Monday, 7=Sunday
mkdir -p "$BACKUP_DIR"
# Check if database exists
if [[ ! -f "$DB_PATH" ]]; then
echo "ERROR: Database not found at $DB_PATH" >&2
exit 1
fi
# Safe hot backup using sqlite3 .backup
DAILY_FILE="$BACKUP_DIR/docfast-daily-${DATE}.db"
sqlite3 "$DB_PATH" ".backup '$DAILY_FILE'"
# Verify backup is valid
if ! sqlite3 "$DAILY_FILE" "PRAGMA integrity_check;" | grep -q "^ok$"; then
echo "ERROR: Backup integrity check failed!" >&2
rm -f "$DAILY_FILE"
exit 1
fi
echo "Created backup: $DAILY_FILE ($(stat -c%s "$DAILY_FILE") bytes)"
# On Sundays, also keep a weekly copy
if [ "$DAY_OF_WEEK" -eq 7 ]; then
WEEKLY_FILE="$BACKUP_DIR/docfast-weekly-$(date +%Y-%m-%d).db"
cp "$DAILY_FILE" "$WEEKLY_FILE"
echo "Created weekly backup: $WEEKLY_FILE"
fi
# Rotate: keep the last 7 days of backups (28 files at 6h intervals)
find "$BACKUP_DIR" -name "docfast-daily-*.db" -type f | sort -r | tail -n +29 | xargs -r rm -f
# Rotate: keep last 4 weekly backups
find "$BACKUP_DIR" -name "docfast-weekly-*.db" -type f | sort -r | tail -n +5 | xargs -r rm -f || true
# Show current backup status
DAILY_COUNT=$(find "$BACKUP_DIR" -name "docfast-daily-*.db" -type f | wc -l)
WEEKLY_COUNT=$(find "$BACKUP_DIR" -name "docfast-weekly-*.db" -type f | wc -l)
echo "Backup rotation complete. Daily: $DAILY_COUNT, Weekly: $WEEKLY_COUNT"

View file

@@ -0,0 +1,143 @@
#!/usr/bin/env node
/**
* Migration script: JSON files → PostgreSQL
* Run on the server where JSON data files exist.
* Usage: DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs
*/
import pg from "pg";
import { readFileSync, existsSync } from "fs";
const { Pool } = pg;
const pool = new Pool({
host: process.env.DATABASE_HOST || "127.0.0.1",
port: parseInt(process.env.DATABASE_PORT || "5432", 10),
database: process.env.DATABASE_NAME || "docfast",
user: process.env.DATABASE_USER || "docfast",
password: process.env.DATABASE_PASSWORD || "docfast",
});
async function migrate() {
const client = await pool.connect();
try {
// Create tables
await client.query(`
CREATE TABLE IF NOT EXISTS api_keys (
key TEXT PRIMARY KEY,
tier TEXT NOT NULL DEFAULT 'free',
email TEXT NOT NULL DEFAULT '',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
stripe_customer_id TEXT
);
CREATE INDEX IF NOT EXISTS idx_api_keys_email ON api_keys(email);
CREATE INDEX IF NOT EXISTS idx_api_keys_stripe ON api_keys(stripe_customer_id);
CREATE TABLE IF NOT EXISTS verifications (
id SERIAL PRIMARY KEY,
email TEXT NOT NULL,
token TEXT NOT NULL UNIQUE,
api_key TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
verified_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS idx_verifications_email ON verifications(email);
CREATE INDEX IF NOT EXISTS idx_verifications_token ON verifications(token);
CREATE TABLE IF NOT EXISTS pending_verifications (
email TEXT PRIMARY KEY,
code TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expires_at TIMESTAMPTZ NOT NULL,
attempts INT NOT NULL DEFAULT 0
);
CREATE TABLE IF NOT EXISTS usage (
key TEXT PRIMARY KEY,
count INT NOT NULL DEFAULT 0,
month_key TEXT NOT NULL
);
`);
console.log("✅ Tables created");
// Migrate keys.json
const keysPath = "/opt/docfast/data/keys.json";
if (existsSync(keysPath)) {
const keysData = JSON.parse(readFileSync(keysPath, "utf-8"));
const keys = keysData.keys || [];
let keyCount = 0;
for (const k of keys) {
await client.query(
`INSERT INTO api_keys (key, tier, email, created_at, stripe_customer_id)
VALUES ($1, $2, $3, $4, $5) ON CONFLICT (key) DO NOTHING`,
[k.key, k.tier, k.email || "", k.createdAt, k.stripeCustomerId || null]
);
keyCount++;
}
console.log(`✅ Migrated ${keyCount} API keys`);
} else {
// not found at the default path; the file may live inside the Docker volume instead
console.log("⚠️ keys.json not found at", keysPath);
}
// Migrate verifications.json
const verifPath = "/opt/docfast/data/verifications.json";
if (existsSync(verifPath)) {
const data = JSON.parse(readFileSync(verifPath, "utf-8"));
const verifications = Array.isArray(data) ? data : (data.verifications || []);
const pending = data.pendingVerifications || [];
let vCount = 0;
for (const v of verifications) {
await client.query(
`INSERT INTO verifications (email, token, api_key, created_at, verified_at)
VALUES ($1, $2, $3, $4, $5) ON CONFLICT (token) DO NOTHING`,
[v.email, v.token, v.apiKey, v.createdAt, v.verifiedAt || null]
);
vCount++;
}
console.log(`✅ Migrated ${vCount} verifications`);
let pCount = 0;
for (const p of pending) {
await client.query(
`INSERT INTO pending_verifications (email, code, created_at, expires_at, attempts)
VALUES ($1, $2, $3, $4, $5) ON CONFLICT (email) DO NOTHING`,
[p.email, p.code, p.createdAt, p.expiresAt, p.attempts]
);
pCount++;
}
console.log(`✅ Migrated ${pCount} pending verifications`);
} else {
console.log("⚠️ verifications.json not found at", verifPath);
}
// Migrate usage.json
const usagePath = "/opt/docfast/data/usage.json";
if (existsSync(usagePath)) {
const usageData = JSON.parse(readFileSync(usagePath, "utf-8"));
let uCount = 0;
for (const [key, record] of Object.entries(usageData)) {
const r = /** @type {any} */ (record);
await client.query(
`INSERT INTO usage (key, count, month_key)
VALUES ($1, $2, $3) ON CONFLICT (key) DO UPDATE SET count = $2, month_key = $3`,
[key, r.count, r.monthKey]
);
uCount++;
}
console.log(`✅ Migrated ${uCount} usage records`);
} else {
console.log("⚠️ usage.json not found at", usagePath);
}
console.log("\n🎉 Migration complete!");
} finally {
client.release();
await pool.end();
}
}
migrate().catch((err) => {
console.error("Migration failed:", err);
process.exit(1);
});
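
A sketch of running the migration and spot-checking row counts afterwards; the password shown is the script's default, not necessarily the production one:

DATABASE_PASSWORD=docfast node scripts/migrate-to-postgres.mjs
# row counts should match the JSON source files
PGPASSWORD=docfast psql -h 127.0.0.1 -U docfast -d docfast -c "
SELECT 'api_keys' AS t, count(*) FROM api_keys
UNION ALL SELECT 'verifications', count(*) FROM verifications
UNION ALL SELECT 'usage', count(*) FROM usage;"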

View file

@@ -0,0 +1,72 @@
#!/bin/bash
set -e
echo "🔄 DocFast Rollback Script"
echo "=========================="
# Check if we're on the server
if [ ! -d "/root/docfast" ]; then
echo "❌ This script should be run on the production server"
exit 1
fi
cd /root/docfast
# List available rollback images
echo "📋 Available rollback images:"
ROLLBACK_IMAGES=$(docker images --format "table {{.Repository}}:{{.Tag}}\t{{.CreatedAt}}" | grep "docfast-docfast:rollback-" | head -10)
if [ -z "$ROLLBACK_IMAGES" ]; then
echo "❌ No rollback images available"
exit 1
fi
echo "$ROLLBACK_IMAGES"
echo ""
# Get the most recent rollback image
LATEST_ROLLBACK=$(docker images --format "{{.Repository}}:{{.Tag}}" | grep "docfast-docfast:rollback-" | head -n1)
if [ -z "$LATEST_ROLLBACK" ]; then
echo "❌ No rollback image found"
exit 1
fi
echo "🎯 Will rollback to: $LATEST_ROLLBACK"
echo ""
# Confirm rollback
read -p "⚠️ Are you sure you want to rollback? (y/N): " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
echo "❌ Rollback cancelled"
exit 1
fi
echo "🛑 Stopping current services..."
docker compose down --timeout 30
echo "🔄 Rolling back to $LATEST_ROLLBACK..."
docker tag "$LATEST_ROLLBACK" docfast-docfast:latest
echo "▶️ Starting services..."
docker compose up -d
echo "⏱️ Waiting for service to be ready..."
for i in {1..20}; do
if curl -f -s http://127.0.0.1:3100/health > /dev/null; then
echo "✅ Rollback successful! Service is healthy."
break
fi
if [ $i -eq 20 ]; then
echo "❌ Rollback failed - service is not responding"
exit 1
fi
echo "⏳ Attempt $i/20 - waiting 3 seconds..."
sleep 3
done
echo "📊 Service status:"
docker compose ps
echo "🎉 Rollback completed successfully!"

View file

@@ -0,0 +1,41 @@
#!/bin/bash
set -euo pipefail
echo "🔐 Forgejo Repository Secrets Setup"
echo "===================================="
# Source credentials to get Forgejo token
source /home/openclaw/.openclaw/workspace/.credentials/docfast.env
# Repository secrets to set up
REPO_URL="https://git.cloonar.com/api/v1/repos/openclawd/docfast/actions/secrets"
echo "Setting up repository secrets for CI/CD..."
# Server host
echo "📡 Setting SERVER_HOST..."
curl -X PUT "$REPO_URL/SERVER_HOST" \
-H "Authorization: token $FORGEJO_TOKEN" \
-H "Content-Type: application/json" \
-d '{"data":"167.235.156.214"}' \
--silent --fail
# Server user
echo "👤 Setting SERVER_USER..."
curl -X PUT "$REPO_URL/SERVER_USER" \
-H "Authorization: token $FORGEJO_TOKEN" \
-H "Content-Type: application/json" \
-d '{"data":"root"}' \
--silent --fail
# SSH Private Key
echo "🔑 Setting SSH_PRIVATE_KEY..."
SSH_KEY_CONTENT=$(jq -Rs . < /home/openclaw/.ssh/docfast)
curl -X PUT "$REPO_URL/SSH_PRIVATE_KEY" \
-H "Authorization: token $FORGEJO_TOKEN" \
-H "Content-Type: application/json" \
-d "{\"data\":$SSH_KEY_CONTENT}" \
--silent --fail
echo "✅ Repository secrets have been configured!"
echo ""
echo "🔍 To verify, check: https://git.cloonar.com/openclawd/docfast/settings/actions/secrets"