Files
proxmox/scripts/verify/check-npmplus-duplicate-security-headers.sh
defiQUG dbd517b279 Sync workspace: config, docs, scripts, CI, operator rules, and submodule pointers.
- Update dbis_core, cross-chain-pmm-lps, explorer-monorepo, metamask-integration, pr-workspace/chains
- Omit embedded publish git dirs and empty placeholders from index

Made-with: Cursor
2026-04-12 06:12:20 -07:00

144 lines
3.1 KiB
Bash
Executable File

#!/usr/bin/env bash
# Verify that NPMplus-proxied domains do not emit duplicate security or
# cache-control response headers (e.g. the same CSP injected by both the
# proxy and the upstream application).
set -euo pipefail
# Resolve the project root relative to this script's own location so the
# checker works no matter what the caller's working directory is.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Domains probed when no arguments are supplied on the command line.
DEFAULT_DOMAINS=(
"sankofa.nexus"
"phoenix.sankofa.nexus"
"the-order.sankofa.nexus"
"dbis-admin.d-bis.org"
"dbis-api.d-bis.org"
"dbis-api-2.d-bis.org"
"secure.d-bis.org"
"mim4u.org"
"www.mim4u.org"
"secure.mim4u.org"
"training.mim4u.org"
"rpc-ws-pub.d-bis.org"
"rpc-http-prv.d-bis.org"
"rpc-ws-prv.d-bis.org"
"rpc.public-0138.defi-oracle.io"
"studio.sankofa.nexus"
"explorer.d-bis.org"
)
# Response headers that must appear at most once per response; a second
# occurrence of any of these is reported as a duplicate.
HEADERS_TO_CHECK=(
"Content-Security-Policy"
"X-Frame-Options"
"X-Content-Type-Options"
"Referrer-Policy"
"X-XSS-Protection"
"Cache-Control"
)
# Running count of failed conditions; any non-zero total makes the script exit 1.
failures=0
# When 1 (set by --all-from-latest-backup), an unreachable host produces a
# warning instead of a failure.
warn_on_unreachable=0
# Abort the script with a diagnostic on stderr unless the named command
# is available on PATH.
require_cmd() {
  local cmd="$1"
  if ! command -v "$cmd" >/dev/null 2>&1; then
    echo "[fail] missing required command: ${cmd}" >&2
    exit 1
  fi
}
# curl is needed for every header probe; bail out early if it is missing.
require_cmd curl
# Print the unique domain names (sorted) recorded in the most recent
# NPMplus proxy-host backup, or exit 1 when no backup exists.
# Globals: PROJECT_ROOT (read). Requires jq.
get_domains_from_latest_backup() {
  require_cmd jq
  local backup="" candidate
  # Pick the newest backup by mtime without parsing `ls` output, so the
  # check still works when PROJECT_ROOT contains spaces or other
  # characters that word-splitting would mangle.
  for candidate in "${PROJECT_ROOT}"/.ops-backups/npmplus/proxy-hosts-*.json; do
    [[ -e "$candidate" ]] || continue  # unmatched glob leaves the literal pattern
    if [[ -z "$backup" || "$candidate" -nt "$backup" ]]; then
      backup="$candidate"
    fi
  done
  if [[ -z "$backup" ]]; then
    echo "[fail] no NPMplus backup found under ${PROJECT_ROOT}/.ops-backups/npmplus" >&2
    exit 1
  fi
  jq -r '.[] | .domain_names[]?' "$backup" | sort -u
}
# Status reporters. All write a tagged line to stdout; `fail` also bumps
# the global `failures` counter that decides the script's exit status.
ok() {
  local tag='[ok]'
  printf '%s %s\n' "$tag" "$*"
}

warn() {
  local tag='[warn]'
  printf '%s %s\n' "$tag" "$*"
}

fail() {
  local tag='[fail]'
  printf '%s %s\n' "$tag" "$*"
  # Assignment form (not `(( ))`) so fail always returns 0 under set -e.
  failures=$((failures + 1))
}
# Count the lines in a raw header dump that start with the given header
# name (case-insensitive). Prints 0 when the header is absent and always
# returns success, so callers under `set -e` are safe.
count_header_lines() {
  local raw_headers="$1" header_name="$2"
  local matches
  matches="$(grep -iEc "^${header_name}:" <<<"$raw_headers" || true)"
  printf '%s\n' "$matches"
}
# Print the first HTTP status line found in a raw header dump, or
# nothing when none is present.
# The trailing `|| true` is the fix: under the script's
# `set -euo pipefail`, a dump with no `HTTP/` line made this pipeline
# return 1 inside the `status="$(...)"` substitution and aborted the
# whole run — even though the caller explicitly handles an empty status
# with a warning. It also absorbs the benign SIGPIPE grep can take when
# `head` exits after the first line.
first_status_line() {
  local headers="$1"
  printf '%s\n' "$headers" | grep -E '^HTTP/' | head -n1 || true
}
# Probe https://<domain>/ and flag any duplicated security/cache header.
# An unreachable host counts as a failure by default, downgraded to a
# warning when warn_on_unreachable=1 (backup-driven lists may contain
# hosts that are intentionally offline).
check_domain() {
  local host="$1"
  local probe_url="https://${host}/"
  local response_headers status_line header_name occurrences
  # -k: skip TLS verification (we only care about headers), -I: HEAD.
  if ! response_headers="$(curl -ksSI --connect-timeout 10 --max-time 20 "$probe_url" 2>/dev/null)"; then
    if [[ "$warn_on_unreachable" -eq 1 ]]; then
      warn "${host} did not return response headers"
    else
      fail "${host} did not return response headers"
    fi
    return
  fi
  status_line="$(first_status_line "$response_headers")"
  if [[ -n "$status_line" ]]; then
    ok "${host} responded (${status_line})"
  else
    warn "${host} returned headers without an HTTP status line"
  fi
  for header_name in "${HEADERS_TO_CHECK[@]}"; do
    occurrences="$(count_header_lines "$response_headers" "$header_name")"
    if [[ "$occurrences" -gt 1 ]]; then
      fail "${host} has duplicate ${header_name} headers (${occurrences})"
    fi
  done
}
# Entry point. Usage:
#   script                          -> check DEFAULT_DOMAINS
#   script d1 d2 ...                -> check the listed domains
#   script --all-from-latest-backup -> check every concrete domain found
#                                      in the newest NPMplus backup
main() {
  local -a targets=()
  local target
  if [[ "$#" -gt 0 && "$1" == "--all-from-latest-backup" ]]; then
    # Backup-derived hosts may be offline on purpose; warn, don't fail.
    warn_on_unreachable=1
    while IFS= read -r target; do
      # Skip blank lines and wildcard entries (e.g. *.example.org),
      # which cannot be probed directly.
      [[ -z "$target" || "$target" == *'*'* ]] && continue
      targets+=("$target")
    done < <(get_domains_from_latest_backup)
  elif [[ "$#" -gt 0 ]]; then
    targets=("$@")
  else
    targets=("${DEFAULT_DOMAINS[@]}")
  fi
  for target in "${targets[@]}"; do
    check_domain "$target"
  done
  if [[ "$failures" -gt 0 ]]; then
    echo
    fail "duplicate-header check failed for ${failures} condition(s)"
    exit 1
  fi
  echo
  ok "no duplicate security/cache headers detected"
}
main "$@"