#!/usr/bin/env bash
# Automated NPMplus Backup Script
# Backs up database, proxy hosts, certificates, and configuration.
# Usage: bash scripts/verify/backup-npmplus.sh [--dry-run]
#
# FIX: commit-note text and file-viewer residue preceded the shebang; a shebang
# is only honored on line 1, so the notes now live here as comments instead.
# Change notes:
# - Gitea: add validate-on-pr.yml (run-all-validation only; no deploy)
# - .env.master.example: document NPM_EMAIL/NPM_PASSWORD for backup-npmplus
# - pnpm: allowedVersions for @solana/sysvars to quiet thirdweb/x402 peer drift
# - AGENTS + verify README: CI pointers and .env.master.example for env
# - backup-npmplus: npm_lxc_ssh helper; keep prior timeout/BatchMode behavior
# - check-pnpm-workspace-lockfile + run-all-validation step 1b
#   (from prior work in same commit set)
# Made-with: Cursor

set -euo pipefail

# Resolve the directory holding this script, then the repository root two
# levels up (scripts/verify/ -> repo root).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# ANSI color codes used by the log helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Shared emitter: $1=color, $2=tag label, $3=message.
_log_line() { echo -e "${1}[${2}]${NC} ${3}"; }

# Leveled log helpers; each prints a colored tag followed by the message.
log_info()    { _log_line "$BLUE" "INFO" "$1"; }
log_success() { _log_line "$GREEN" "✓" "$1"; }
log_warn()    { _log_line "$YELLOW" "⚠" "$1"; }
log_error()   { _log_line "$RED" "✗" "$1"; }
cd "$PROJECT_ROOT"

# Source a dotenv-style file with strict mode temporarily relaxed, so a
# malformed entry cannot abort the backup; missing files are skipped silently.
_source_relaxed() {
  [ -f "$1" ] || return 0
  set +euo pipefail
  source "$1" 2>/dev/null || true
  set -euo pipefail
}

# Operator credentials: repo root .env first, then smom-dbis-138/.env.
_source_relaxed .env
_source_relaxed smom-dbis-138/.env

# Load ip-addresses.conf so the configuration defaults below have fallbacks.
if [ -f "${PROJECT_ROOT}/config/ip-addresses.conf" ]; then
  source "${PROJECT_ROOT}/config/ip-addresses.conf" 2>/dev/null || true
fi
# Configuration (from .env; NPMPLUS_* fall back to NPM_* / PROXMOX_HOST per .env.example)
NPMPLUS_VMID="${NPMPLUS_VMID:-${NPM_VMID:-10233}}"
NPMPLUS_HOST="${NPMPLUS_HOST:-${NPM_PROXMOX_HOST:-${PROXMOX_HOST:-${PROXMOX_HOST_R630_01:-192.168.11.11}}}}"
# FIX: the original used ${IP_NPMPLUS:-${IP_NPMPLUS:-192.168.11.167}} — the
# nested fallback repeated the same variable and was redundant; a single
# default is equivalent.
NPM_URL="${NPM_URL:-https://${IP_NPMPLUS:-192.168.11.167}:81}"
# Set NPM_EMAIL / NPM_PASSWORD in .env (no defaults — avoids baking personal
# addresses or secrets into repo defaults). Both are validated before use.
NPM_EMAIL="${NPM_EMAIL:-}"
NPM_PASSWORD="${NPM_PASSWORD:-}"
# SSH to the Proxmox node hosting the NPM LXC. BatchMode suppresses interactive
# prompts for unattended runs; ConnectTimeout bounds hangs on unreachable hosts.
npm_lxc_ssh() {
  local -a ssh_opts=(-o ConnectTimeout=15 -o BatchMode=yes)
  ssh "${ssh_opts[@]}" "root@${NPMPLUS_HOST}" "$@"
}
# --dry-run: describe the planned backup and exit without touching anything.
DRY_RUN=false
if [[ "${1:-}" == "--dry-run" ]]; then
  DRY_RUN=true
fi
# Backup destination: a BACKUP_DIR env override (if set) becomes the base,
# otherwise a repo-local default; each run writes a timestamped subdirectory.
BACKUP_BASE_DIR="${BACKUP_DIR:-$PROJECT_ROOT/backups/npmplus}"
TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
BACKUP_DIR="${BACKUP_BASE_DIR}/backup-${TIMESTAMP}"
# Banner
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "💾 NPMplus Backup Script"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""

# Dry runs only describe what would happen — no credentials needed, nothing
# is contacted or written.
if [[ "$DRY_RUN" == true ]]; then
  log_info "DRY-RUN: would backup NPMplus (database, API exports, certs) to $BACKUP_DIR"
  log_info "Run without --dry-run to perform backup."
  exit 0
fi

# Real runs require API credentials before any work starts.
if [ -z "$NPM_PASSWORD" ]; then
  log_error "NPM_PASSWORD is required (set in .env or export before running)"
  exit 1
fi
if [ -z "$NPM_EMAIL" ]; then
  log_error "NPM_EMAIL is required for API export steps (set in .env; no default)"
  exit 1
fi
mkdir -p "$BACKUP_DIR"
log_info "Backup destination: $BACKUP_DIR"
echo ""

# Step 1: Backup SQLite Database
log_info "Step 1: Backing up NPMplus database..."
DB_BACKUP_DIR="$BACKUP_DIR/database"
mkdir -p "$DB_BACKUP_DIR"

# Locate database.sqlite inside the LXC: probe the two known layouts first,
# then fall back to a bounded find. Prints the path, or nothing on a miss.
# Kept single-quoted so it reaches the remote sh unexpanded.
_db_probe_script='
for p in /data/database.sqlite /data/database/database.sqlite; do
  [ -f "$p" ] && { echo "$p"; exit 0; }
done
f=$(find /data -maxdepth 6 -name database.sqlite 2>/dev/null | head -1)
if [ -n "$f" ] && [ -f "$f" ]; then echo "$f"; else echo ""; fi
'
NPM_DB_PATH=$(
  npm_lxc_ssh "pct exec $NPMPLUS_VMID -- sh -c '$_db_probe_script'" 2>/dev/null \
    | tr -d '\r' || true
)
# Squash any embedded newlines so the result is a single path token.
NPM_DB_PATH="${NPM_DB_PATH//$'\n'/}"

if [ -n "$NPM_DB_PATH" ]; then
  log_info " Found DB at: $NPM_DB_PATH (inside LXC)"
else
  log_info " No database.sqlite at common paths; dump may be skipped (check container / mounts)"
fi
# Method 1: logical SQL dump (restorable with `sqlite3 new.db < database.sql`).
log_info " Creating SQL dump..."
SQL_OUT="$DB_BACKUP_DIR/database.sql"
NPM_DB_PQ=""
if [ -n "$NPM_DB_PATH" ]; then
  # Shell-quote the remote path once; also reused by the binary copy below.
  NPM_DB_PQ=$(printf %q "$NPM_DB_PATH")
  _dump_cmd="if command -v sqlite3 >/dev/null 2>&1; then sqlite3 $NPM_DB_PQ .dump; else echo _NO_SQLITE3; fi"
  npm_lxc_ssh "pct exec $NPMPLUS_VMID -- sh -c \"$_dump_cmd\"" > "$SQL_OUT" 2>/dev/null || : > "$SQL_OUT"
  # The sentinel means the container lacks sqlite3 — truncate so the file
  # reads as "empty" instead of containing bogus content.
  if grep -qxF '_NO_SQLITE3' "$SQL_OUT" 2>/dev/null; then
    : > "$SQL_OUT"
  fi
else
  : > "$SQL_OUT"
fi
# Method 2: raw copy of the .sqlite file (binary; complements the SQL dump).
if [ -n "$NPM_DB_PATH" ]; then
  log_info " Copying database file (binary)..."
  if npm_lxc_ssh "pct exec $NPMPLUS_VMID -- sh -c \"cat $NPM_DB_PQ\"" > "$DB_BACKUP_DIR/database.sqlite" 2>/dev/null; then
    log_info " Binary database copy OK"
  elif [ -s "$DB_BACKUP_DIR/database.sql" ] && grep -qiE 'CREATE|INSERT|PRAGMA' "$DB_BACKUP_DIR/database.sql" 2>/dev/null; then
    # Binary copy failed but the SQL dump looks real — still restorable.
    log_info " Skipped or failed binary copy; SQL dump contains schema/data and is usable for restore"
  else
    log_warn " Direct copy failed and SQL dump empty or unusable — check LXC/SSH, sqlite3, or volume mount"
  fi
else
  : > "$DB_BACKUP_DIR/database.sqlite"
  log_info " Skipping binary copy (no DB file resolved)"
fi

# Verdict: a SQL dump with real statements OR a non-empty binary file counts.
if { [ -s "$DB_BACKUP_DIR/database.sql" ] && grep -qiE 'CREATE|INSERT|PRAGMA' "$DB_BACKUP_DIR/database.sql" 2>/dev/null; } || [ -s "$DB_BACKUP_DIR/database.sqlite" ]; then
  log_success " Database backup completed"
else
  log_warn " Database backup empty — LXC not reachable, DB path changed, or sqlite3 missing in container"
fi
# Step 2: Export Proxy Hosts via API
log_info "Step 2: Exporting proxy hosts configuration..."
API_BACKUP_DIR="$BACKUP_DIR/api"
mkdir -p "$API_BACKUP_DIR"

# Authenticate to obtain a bearer token for the export calls below.
# FIX: the original interpolated $NPM_EMAIL/$NPM_PASSWORD directly into a JSON
# string on curl's command line, which (a) produced invalid/injectable JSON
# when either value contained a double quote or backslash, and (b) exposed the
# secret to every local user via the process table (`ps`). Build the payload
# with `jq --arg` (correct JSON escaping) and pass it on stdin via `-d @-`.
log_info " Authenticating to NPMplus API..."
TOKEN_RESPONSE=$(
  jq -n --arg identity "$NPM_EMAIL" --arg secret "$NPM_PASSWORD" \
    '{identity: $identity, secret: $secret}' \
    | curl -s -k -X POST "$NPM_URL/api/tokens" \
        -H "Content-Type: application/json" \
        -d @-
)

# Extract the token; empty or "null" means authentication failed.
TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.token // empty' 2>/dev/null || echo "")
if [ -z "$TOKEN" ] || [ "$TOKEN" = "null" ]; then
  log_error " Failed to authenticate to NPMplus API"
  log_warn " Skipping API-based exports"
else
  log_success " Authenticated successfully"

  # Fetch one API resource and pretty-print it to disk.
  # $1 = endpoint path under /api, $2 = destination file.
  _api_export() {
    curl -s -k -X GET "$NPM_URL/api/$1" \
      -H "Authorization: Bearer $TOKEN" | jq '.' > "$2"
  }

  # Export proxy hosts
  log_info " Exporting proxy hosts..."
  _api_export "nginx/proxy-hosts" "$API_BACKUP_DIR/proxy_hosts.json" || {
    log_warn " Failed to export proxy hosts"
  }

  # Export certificates
  log_info " Exporting certificates..."
  _api_export "nginx/certificates" "$API_BACKUP_DIR/certificates.json" || {
    log_warn " Failed to export certificates"
  }

  # Export access lists (endpoint may be absent on some NPM builds)
  log_info " Exporting access lists..."
  _api_export "nginx/access-lists" "$API_BACKUP_DIR/access_lists.json" 2>/dev/null || {
    log_warn " Failed to export access lists (may not be supported)"
  }

  log_success " API exports completed"
fi
# Step 3: Backup Certificate Files
log_info "Step 3: Backing up certificate files..."
CERT_BACKUP_DIR="$BACKUP_DIR/certificates"
mkdir -p "$CERT_BACKUP_DIR"

# Probe the known Certbot/letsencrypt live-dir layouts inside the LXC; the
# first existing directory wins.
CERT_LIVE_BASE=""
for candidate in /data/tls/certbot/live /etc/letsencrypt/live /data/letsencrypt/live; do
  if npm_lxc_ssh "pct exec $NPMPLUS_VMID -- test -d '$candidate'" 2>/dev/null; then
    CERT_LIVE_BASE="$candidate"
    break
  fi
done

# Record certificate directory names (one per line) for the copy loop; filter
# out lost+found and blank lines. An empty list file means "nothing to copy".
if [ -n "$CERT_LIVE_BASE" ]; then
  log_info " Listing certificates (from $CERT_LIVE_BASE)..."
  if ! npm_lxc_ssh "pct exec $NPMPLUS_VMID -- ls -1 $CERT_LIVE_BASE/ 2>/dev/null" \
      | grep -v '^lost+found$' | grep -v '^$' > "$CERT_BACKUP_DIR/cert_list.txt" 2>/dev/null; then
    : > "$CERT_BACKUP_DIR/cert_list.txt"
  fi
else
  : > "$CERT_BACKUP_DIR/cert_list.txt"
  log_info " No Certbot/letsencrypt live dir in common paths; PEM files may be elsewhere or API-only"
fi
# Copy the listed certificate files out of the LXC.
if [ -s "$CERT_BACKUP_DIR/cert_list.txt" ]; then
  log_info " Copying certificate files..."
  while IFS= read -r cert_dir; do
    if [ -n "$cert_dir" ] && [ "$cert_dir" != "lost+found" ]; then
      mkdir -p "$CERT_BACKUP_DIR/$cert_dir"
      # FIX: the original embedded $CERT_LIVE_BASE/$cert_dir unquoted into the
      # remote command, so a directory name with spaces or shell
      # metacharacters would break — or inject into — the remote shell.
      # Quote with printf %q, consistent with NPM_DB_PQ above.
      fullchain_q=$(printf %q "$CERT_LIVE_BASE/$cert_dir/fullchain.pem")
      privkey_q=$(printf %q "$CERT_LIVE_BASE/$cert_dir/privkey.pem")
      npm_lxc_ssh "pct exec $NPMPLUS_VMID -- cat $fullchain_q" > "$CERT_BACKUP_DIR/$cert_dir/fullchain.pem" 2>/dev/null || {
        log_warn " Failed to copy fullchain.pem for $cert_dir"
      }
      npm_lxc_ssh "pct exec $NPMPLUS_VMID -- cat $privkey_q" > "$CERT_BACKUP_DIR/$cert_dir/privkey.pem" 2>/dev/null || {
        log_warn " Failed to copy privkey.pem for $cert_dir"
      }
    fi
  done < "$CERT_BACKUP_DIR/cert_list.txt"
  log_success " Certificate files backed up (where readable)"
else
  # No on-disk PEM dirs — fall back to counting certs in the API export so the
  # operator knows whether certificates exist at all.
  API_CERT_N=0
  if [ -f "$API_BACKUP_DIR/certificates.json" ] && command -v jq >/dev/null 2>&1; then
    API_CERT_N=$(jq 'if type == "array" then length else 0 end' "$API_BACKUP_DIR/certificates.json" 2>/dev/null) || API_CERT_N=0
  fi
  if [ "$API_CERT_N" -gt 0 ] 2>/dev/null; then
    log_info " No on-disk cert dirs listed; $API_CERT_N certificate(s) in api/certificates.json (PEMs may be internal only)"
  else
    log_info " No separate PEM backup (none listed or API export missing)"
  fi
fi
# Step 4: Backup Docker Volume (if accessible)
log_info "Step 4: Attempting Docker volume backup..."
VOLUME_BACKUP_DIR="$BACKUP_DIR/volumes"
mkdir -p "$VOLUME_BACKUP_DIR"

# Best effort: record `docker volume ls` output when Docker exists in the LXC.
if ! npm_lxc_ssh "pct exec $NPMPLUS_VMID -- docker volume ls" > "$VOLUME_BACKUP_DIR/volume_list.txt" 2>/dev/null; then
  log_info " No Docker in LXC or not used — skip volume list"
else
  log_info " Docker volume list written (if NPM uses host paths instead, that is still OK)"
fi
# Step 5: Create backup manifest
log_info "Step 5: Creating backup manifest..."

# Report "present" when a backup artifact exists and is non-empty.
_artifact_status() { [ -s "$1" ] && echo "present" || echo "missing"; }

cat > "$BACKUP_DIR/manifest.json" <<EOF
{
  "timestamp": "$TIMESTAMP",
  "backup_date": "$(date -Iseconds)",
  "npmplus_vmid": "$NPMPLUS_VMID",
  "npmplus_host": "$NPMPLUS_HOST",
  "npm_url": "$NPM_URL",
  "backup_contents": {
    "database": {
      "sql_dump": "$(_artifact_status "$DB_BACKUP_DIR/database.sql")",
      "sqlite_file": "$(_artifact_status "$DB_BACKUP_DIR/database.sqlite")"
    },
    "api_exports": {
      "proxy_hosts": "$(_artifact_status "$API_BACKUP_DIR/proxy_hosts.json")",
      "certificates": "$(_artifact_status "$API_BACKUP_DIR/certificates.json")",
      "access_lists": "$(_artifact_status "$API_BACKUP_DIR/access_lists.json")"
    },
    "certificate_files": "$(_artifact_status "$CERT_BACKUP_DIR/cert_list.txt")"
  }
}
EOF
# Step 6: Compress backup
log_info "Step 6: Compressing backup..."
cd "$BACKUP_BASE_DIR"
ARCHIVE="backup-$TIMESTAMP.tar.gz"
if ! tar -czf "$ARCHIVE" "backup-$TIMESTAMP" 2>/dev/null; then
  log_warn " Compression failed - backup directory remains uncompressed"
fi

if [ -f "$ARCHIVE" ]; then
  BACKUP_SIZE=$(du -h "$ARCHIVE" | cut -f1)
  log_success " Backup compressed: $ARCHIVE ($BACKUP_SIZE)"
  # Uncompressed directory intentionally kept; uncomment to prune:
  # rm -rf "backup-$TIMESTAMP"
fi

# Summary
echo ""
log_success "Backup completed successfully!"
log_info "Backup location: $BACKUP_DIR"
if [ -f "$BACKUP_BASE_DIR/$ARCHIVE" ]; then
  log_info "Compressed backup: $BACKUP_BASE_DIR/$ARCHIVE"
fi
echo ""