chore: sync docs, config schemas, scripts, and meta task alignment

- Institutional / JVMTM / reserve-provenance / GRU transport + standards JSON
- Validation and verify scripts (Blockscout labels, x402, GRU preflight, P1 local path)
- Wormhole wiring in AGENTS, MCP_SETUP, MASTER_INDEX, 04-configuration README
- Meta docs, integration gaps, live verification log, architecture updates
- CI validate-config workflow updates

Operator/LAN items, submodule working trees, and public token-aggregation edge
routes remain follow-up (see TODOS_CONSOLIDATED P1).

Made-with: Cursor
This commit is contained in:
defiQUG
2026-03-31 22:31:39 -07:00
parent 00880304d4
commit 7ac74f432b
948 changed files with 47476 additions and 490 deletions

View File

@@ -51,7 +51,7 @@ check_env() {
if $DRY_RUN; then
echo "=== Validation (--dry-run: would check) ==="
echo " REQUIRED_FILES: ${REQUIRED_FILES:-<default: config/ip-addresses.conf, .env.example, token-mapping*.json>}"
echo " REQUIRED_FILES: ${REQUIRED_FILES:-<default: config/ip-addresses.conf, .env.example, token-mapping*.json, gru-transport-active.json, gru-iso4217-currency-manifest.json>}"
echo " OPTIONAL_ENV: ${OPTIONAL_ENV:-<empty; set VALIDATE_OPTIONAL_ENV for Proxmox API vars>}"
exit 0
fi
@@ -63,6 +63,7 @@ if [[ -n "$REQUIRED_FILES" ]]; then
else
# Default: check common locations
[[ -d "$PROJECT_ROOT/config" ]] && check_file "$PROJECT_ROOT/config/ip-addresses.conf" || true
[[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && check_file "$PROJECT_ROOT/config/smart-contracts-master.json" || true
[[ -f "$PROJECT_ROOT/.env.example" ]] && log_ok ".env.example present (copy to .env and fill)" || true
# Token mapping (Chain 138 ↔ Mainnet): optional but validate structure if present
if [[ -f "$PROJECT_ROOT/config/token-mapping.json" ]]; then
@@ -91,6 +92,283 @@ else
fi
fi
fi
if [[ -f "$PROJECT_ROOT/config/gru-transport-active.json" ]]; then
log_ok "Found: config/gru-transport-active.json"
if command -v jq &>/dev/null; then
if jq -e '
(.system.name | type == "string")
and (.system.shortName | type == "string")
and (.enabledCanonicalTokens | type == "array")
and (.enabledDestinationChains | type == "array")
and (.approvedBridgePeers | type == "array")
and (.transportPairs | type == "array")
and (.publicPools | type == "array")
' "$PROJECT_ROOT/config/gru-transport-active.json" &>/dev/null; then
log_ok "gru-transport-active.json: top-level overlay structure is valid"
else
log_err "gru-transport-active.json: invalid top-level structure"
ERRORS=$((ERRORS + 1))
fi
fi
if command -v node &>/dev/null; then
if PROJECT_ROOT="$PROJECT_ROOT" node <<'NODE'
const fs = require('fs');
const path = require('path');
const projectRoot = process.env.PROJECT_ROOT;
function readJson(relativePath) {
return JSON.parse(fs.readFileSync(path.join(projectRoot, relativePath), 'utf8'));
}
function normalizeAddress(address) {
return typeof address === 'string' ? address.trim().toLowerCase() : '';
}
function isNonZeroAddress(address) {
const normalized = normalizeAddress(address);
return /^0x[a-f0-9]{40}$/.test(normalized) && normalized !== '0x0000000000000000000000000000000000000000';
}
function refConfigured(ref) {
return !!ref && typeof ref === 'object' && (
(typeof ref.address === 'string' && ref.address.trim() !== '') ||
(typeof ref.env === 'string' && ref.env.trim() !== '')
);
}
const active = readJson('config/gru-transport-active.json');
const multichain = readJson('config/token-mapping-multichain.json');
const deployment = readJson('cross-chain-pmm-lps/config/deployment-status.json');
const poolMatrix = readJson('cross-chain-pmm-lps/config/pool-matrix.json');
const currencyManifest = readJson('config/gru-iso4217-currency-manifest.json');
const errors = [];
const canonicalChainId = Number(active.system?.canonicalChainId ?? 138);
const enabledCanonicalTokens = Array.isArray(active.enabledCanonicalTokens) ? active.enabledCanonicalTokens : [];
const enabledCanonical = new Set(enabledCanonicalTokens.map((token) => String(token.symbol)));
const enabledChainsArray = Array.isArray(active.enabledDestinationChains) ? active.enabledDestinationChains : [];
const enabledChains = new Set(enabledChainsArray.map((chain) => Number(chain.chainId)));
const peersByKey = new Map((active.approvedBridgePeers || []).map((peer) => [String(peer.key), peer]));
const reserveVerifiers = active.reserveVerifiers && typeof active.reserveVerifiers === 'object'
? active.reserveVerifiers
: {};
const transportPairsByKey = new Map((active.transportPairs || []).map((pair) => [String(pair.key), pair]));
const publicPoolsByKey = new Map((active.publicPools || []).map((pool) => [String(pool.key), pool]));
const manifestByCode = new Map((currencyManifest.currencies || []).map((currency) => [String(currency.code), currency]));
function getMappingPair(fromChainId, toChainId) {
return (multichain.pairs || []).find(
(entry) => Number(entry.fromChainId) === Number(fromChainId) && Number(entry.toChainId) === Number(toChainId)
);
}
function getMappingToken(fromChainId, toChainId, mappingKey) {
const pair = getMappingPair(fromChainId, toChainId);
if (!pair) return null;
return (pair.tokens || []).find((token) => token.key === mappingKey) || null;
}
function getExpectedPoolKey(chainId, mirroredSymbol) {
const chain = poolMatrix.chains?.[String(chainId)];
const hubStable = typeof chain?.hubStable === 'string' ? chain.hubStable.trim() : '';
if (!hubStable) return null;
return `${chainId}-${mirroredSymbol}-${hubStable}`;
}
for (const chain of active.enabledDestinationChains || []) {
if (!peersByKey.has(String(chain.peerKey || ''))) {
errors.push(`enabledDestinationChains[${chain.chainId}] references missing peerKey ${chain.peerKey}`);
}
}
for (const token of enabledCanonicalTokens) {
const currency = manifestByCode.get(String(token.currencyCode || ''));
if (!currency) {
errors.push(`enabledCanonicalTokens[${token.symbol}] references missing currencyCode ${token.currencyCode} in gru-iso4217-currency-manifest.json`);
continue;
}
if (currency.status?.deployed !== true) {
errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to be deployed`);
}
if (currency.status?.transportActive !== true) {
errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to mark transportActive=true`);
}
}
for (const pair of active.transportPairs || []) {
const canonicalChainId = Number(pair.canonicalChainId ?? active.system?.canonicalChainId ?? 138);
const destinationChainId = Number(pair.destinationChainId);
const canonicalSymbol = String(pair.canonicalSymbol || '');
const mirroredSymbol = String(pair.mirroredSymbol || '');
if (!enabledCanonical.has(canonicalSymbol)) {
errors.push(`transportPairs[${pair.key}] uses canonicalSymbol ${canonicalSymbol} which is not enabled`);
}
if (!enabledChains.has(destinationChainId)) {
errors.push(`transportPairs[${pair.key}] uses destinationChainId ${destinationChainId} which is not enabled`);
}
const peer = peersByKey.get(String(pair.peerKey || ''));
if (!peer) {
errors.push(`transportPairs[${pair.key}] is missing approved bridge peer ${pair.peerKey}`);
} else {
if (!refConfigured(peer.l1Bridge)) {
errors.push(`approvedBridgePeers[${peer.key}] is missing l1Bridge wiring`);
}
if (!refConfigured(peer.l2Bridge)) {
errors.push(`approvedBridgePeers[${peer.key}] is missing l2Bridge wiring`);
}
}
const maxOutstanding = pair.maxOutstanding && typeof pair.maxOutstanding === 'object' ? pair.maxOutstanding : null;
if (!maxOutstanding || (!maxOutstanding.amount && !maxOutstanding.env)) {
errors.push(`transportPairs[${pair.key}] is missing maxOutstanding amount/env`);
}
const mappingToken = getMappingToken(canonicalChainId, destinationChainId, pair.mappingKey);
if (!mappingToken) {
errors.push(`transportPairs[${pair.key}] mappingKey ${pair.mappingKey} is missing from token-mapping-multichain.json`);
} else {
if (!isNonZeroAddress(mappingToken.addressFrom)) {
errors.push(`transportPairs[${pair.key}] has invalid canonical addressFrom in token-mapping-multichain.json`);
}
if (!isNonZeroAddress(mappingToken.addressTo)) {
errors.push(`transportPairs[${pair.key}] mapping exists but cW pair is not deployed (addressTo missing/zero)`);
}
}
const deploymentChain = deployment.chains?.[String(destinationChainId)];
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
if (!deploymentChain || !isNonZeroAddress(deployedMirror)) {
errors.push(`transportPairs[${pair.key}] mapping exists but deployment-status.json has no deployed ${mirroredSymbol} for chain ${destinationChainId}`);
} else if (mappingToken && normalizeAddress(deployedMirror) !== normalizeAddress(mappingToken.addressTo)) {
errors.push(`transportPairs[${pair.key}] deployment-status.json ${mirroredSymbol} does not match token-mapping-multichain.json addressTo`);
}
if ((pair.publicPoolKeys || []).length > 0) {
for (const publicPoolKey of pair.publicPoolKeys) {
if (!publicPoolsByKey.has(String(publicPoolKey))) {
errors.push(`transportPairs[${pair.key}] references missing public pool key ${publicPoolKey}`);
}
}
}
if (pair.reserveVerifierKey) {
const verifier = reserveVerifiers[pair.reserveVerifierKey];
if (!verifier) {
errors.push(`transportPairs[${pair.key}] requires missing reserve verifier ${pair.reserveVerifierKey}`);
} else {
if (!refConfigured(verifier.bridgeRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing bridgeRef wiring`);
}
if (!refConfigured(verifier.verifierRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing verifierRef wiring`);
}
if (verifier.requireVaultBacking && !refConfigured(verifier.vaultRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires vault backing but vaultRef is unset`);
}
if (verifier.requireReserveSystemBalance && !refConfigured(verifier.reserveSystemRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires reserve-system balance checks but reserveSystemRef is unset`);
}
}
}
}
for (const pool of active.publicPools || []) {
if (pool.active === true) {
if (!isNonZeroAddress(pool.poolAddress)) {
errors.push(`publicPools[${pool.key}] is active but has no poolAddress`);
continue;
}
const deploymentChain = deployment.chains?.[String(pool.chainId)];
const deployedPools = Array.isArray(deploymentChain?.pmmPools) ? deploymentChain.pmmPools : [];
const deploymentMatch = deployedPools.some((entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress));
if (!deploymentMatch) {
errors.push(`publicPools[${pool.key}] is active but deployment-status.json does not contain its poolAddress`);
}
}
}
for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains || {})) {
const destinationChainId = Number(chainIdKey);
if (destinationChainId === canonicalChainId) continue;
if (deploymentChain?.bridgeAvailable !== true) continue;
const mappingPair = getMappingPair(canonicalChainId, destinationChainId);
if (!mappingPair) continue;
let compatible = true;
for (const token of enabledCanonicalTokens) {
const mappingKey = String(token.mappingKey || '');
const mirroredSymbol = String(token.mirroredSymbol || '');
const mappingToken = mappingKey ? (mappingPair.tokens || []).find((entry) => entry.key === mappingKey) : null;
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
const expectedPoolKey = getExpectedPoolKey(destinationChainId, mirroredSymbol);
if (
!mappingKey ||
!mappingToken ||
!isNonZeroAddress(mappingToken.addressTo) ||
!isNonZeroAddress(deployedMirror) ||
normalizeAddress(mappingToken.addressTo) !== normalizeAddress(deployedMirror) ||
!expectedPoolKey
) {
compatible = false;
break;
}
}
if (!compatible) continue;
const enabledChain = enabledChainsArray.find((chain) => Number(chain.chainId) === destinationChainId);
if (!enabledChain) {
errors.push(`compatible destination chain ${destinationChainId} (${deploymentChain?.name || 'unknown'}) is missing from enabledDestinationChains`);
continue;
}
for (const token of enabledCanonicalTokens) {
const expectedPairKey = `${canonicalChainId}-${destinationChainId}-${token.symbol}-${token.mirroredSymbol}`;
const expectedPoolKey = getExpectedPoolKey(destinationChainId, String(token.mirroredSymbol || ''));
const pair = transportPairsByKey.get(expectedPairKey);
if (!pair) {
errors.push(`compatible destination chain ${destinationChainId} is missing transport pair ${expectedPairKey}`);
continue;
}
if (expectedPoolKey && !publicPoolsByKey.has(expectedPoolKey)) {
errors.push(`compatible destination chain ${destinationChainId} is missing public pool placeholder ${expectedPoolKey}`);
}
if (expectedPoolKey && !(pair.publicPoolKeys || []).includes(expectedPoolKey)) {
errors.push(`transportPairs[${pair.key}] must include the pool-matrix first-hop key ${expectedPoolKey}`);
}
}
}
if (errors.length > 0) {
console.error(errors.join('\n'));
process.exit(1);
}
NODE
then
log_ok "gru-transport-active.json: overlay cross-checks passed"
else
log_err "gru-transport-active.json: overlay cross-checks failed"
ERRORS=$((ERRORS + 1))
fi
else
log_err "Node.js is required to validate gru-transport-active.json cross-file wiring"
ERRORS=$((ERRORS + 1))
fi
else
log_err "Missing config/gru-transport-active.json"
ERRORS=$((ERRORS + 1))
fi
[[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && log_ok "Found: config/smart-contracts-master.json" || true
# Token lists (Uniswap format): validate structure if present
for list in token-lists/lists/dbis-138.tokenlist.json token-lists/lists/cronos.tokenlist.json token-lists/lists/all-mainnet.tokenlist.json; do
@@ -154,6 +432,79 @@ else
log_err "Missing config/proxmox-operational-template.json"
ERRORS=$((ERRORS + 1))
fi
if [[ -f "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json" ]]; then
log_ok "Found: config/gru-iso4217-currency-manifest.json"
if command -v jq &>/dev/null; then
if jq -e '
(.name | type == "string")
and (.version | type == "string")
and (.updated | type == "string")
and (.canonicalChainId | type == "number")
and (.currencies | type == "array")
and ((.currencies | length) > 0)
and ((.currencies | map(.code) | unique | length) == (.currencies | length))
and (
all(.currencies[];
(.code | type == "string")
and ((.code | length) >= 3)
and (.name | type == "string")
and (.type == "fiat" or .type == "commodity")
and ((.minorUnits == null) or (.minorUnits | type == "number"))
and (.status.planned | type == "boolean")
and (.status.deployed | type == "boolean")
and (.status.transportActive | type == "boolean")
and (.status.x402Ready | type == "boolean")
and (.canonicalAssets | type == "object")
)
)
' "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json" &>/dev/null; then
log_ok "gru-iso4217-currency-manifest.json: top-level manifest structure is valid"
else
log_err "gru-iso4217-currency-manifest.json: invalid top-level structure"
ERRORS=$((ERRORS + 1))
fi
fi
fi
if [[ -f "$PROJECT_ROOT/config/gru-standards-profile.json" ]]; then
log_ok "Found: config/gru-standards-profile.json"
if command -v jq &>/dev/null; then
if jq -e '
(.name | type == "string")
and (.profileId | type == "string")
and (.version | type == "string")
and (.updated | type == "string")
and (.canonicalChainId | type == "number")
and (.scope | type == "object")
and (.paymentProfiles | type == "array")
and (.baseTokenStandards | type == "array")
and (.transportAndWrapperStandards | type == "array")
and (.governanceAndPolicyStandards | type == "array")
' "$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
log_ok "gru-standards-profile.json: top-level standards profile structure is valid"
else
log_err "gru-standards-profile.json: invalid top-level structure"
ERRORS=$((ERRORS + 1))
fi
if jq -e '
(.canonicalChainId == $activeChain)
and (.canonicalChainId == $manifestChain)
and (.references.transportOverlay == "config/gru-transport-active.json")
and (.references.currencyManifest == "config/gru-iso4217-currency-manifest.json")
' \
--argjson activeChain "$(jq -r '.system.canonicalChainId' "$PROJECT_ROOT/config/gru-transport-active.json")" \
--argjson manifestChain "$(jq -r '.canonicalChainId' "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json")" \
"$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
log_ok "gru-standards-profile.json: canonical-chain and reference wiring matches active overlay + currency manifest"
else
log_err "gru-standards-profile.json: canonical-chain or reference wiring does not match active overlay / currency manifest"
ERRORS=$((ERRORS + 1))
fi
fi
else
log_err "Missing config/gru-standards-profile.json"
ERRORS=$((ERRORS + 1))
fi
fi
if [[ -n "$OPTIONAL_ENV" ]]; then
@@ -162,6 +513,34 @@ if [[ -n "$OPTIONAL_ENV" ]]; then
done
fi
# DBIS institutional Digital Master Plan example JSON: run the helper
# script when both the example file and the validator are present.
if [[ -f "$PROJECT_ROOT/config/dbis-institutional/examples/trust.json" ]] && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-json.sh" ]]; then
  if ! bash "$SCRIPT_DIR/validate-dbis-institutional-json.sh" &>/dev/null; then
    log_err "DBIS institutional examples failed JSON parse"
    ERRORS=$((ERRORS + 1))
  else
    log_ok "DBIS institutional examples (JSON parse)"
  fi
fi
# DBIS institutional JSON Schemas: delegate to the schema validator in
# strict mode when check-jsonschema is installed.
if command -v check-jsonschema &>/dev/null && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh" ]]; then
  if ! SCHEMA_STRICT=1 bash "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh" &>/dev/null; then
    log_err "DBIS institutional JSON Schema validation failed (pip install check-jsonschema)"
    ERRORS=$((ERRORS + 1))
  else
    log_ok "DBIS institutional JSON Schemas (settlement-event, address-registry-entry)"
  fi
fi
if [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && command -v jq &>/dev/null && [[ -x "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh" ]]; then
if bash "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh" &>/dev/null; then
log_ok "Explorer address-inventory Chain 138 vs smart-contracts-master.json"
else
log_err "Explorer address-inventory Chain 138 drift (see validate-explorer-chain138-inventory.sh)"
ERRORS=$((ERRORS + 1))
fi
fi
if [[ $ERRORS -gt 0 ]]; then
log_err "Validation failed with $ERRORS error(s). Set VALIDATE_REQUIRED_FILES='path1 path2' to require specific files."
exit 1

View File

@@ -0,0 +1,389 @@
#!/usr/bin/env bash
# Validate the DBIS identity completion package.
# Usage:
# bash scripts/validation/validate-dbis-identity-package.sh \
# --package config/production/dbis-identity-public-did-package.json \
# --secrets config/production/dbis-identity-public-did-secrets.env
#
# For template validation only:
# bash scripts/validation/validate-dbis-identity-package.sh \
# --package config/production/dbis-identity-public-did-package.example.json \
# --secrets config/production/dbis-identity-public-did-secrets.example.env \
# --allow-placeholders
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Default production inputs; overridable via --package / --secrets.
PACKAGE_PATH="$PROJECT_ROOT/config/production/dbis-identity-public-did-package.json"
SECRETS_PATH="$PROJECT_ROOT/config/production/dbis-identity-public-did-secrets.env"
ALLOW_PLACEHOLDERS=false
# Flipped to true later when packageStatus == "awaiting-external-endorser";
# downgrades externally-supplied-field gaps from errors to warnings.
PARTIAL_EXTERNAL_ALLOWED=false
log_info() { echo "[INFO] $1"; }
log_ok() { echo "[OK] $1"; }
log_warn() { echo "[WARN] $1"; }
log_err() { echo "[ERROR] $1"; }
# Parse CLI flags. --package / --secrets override the default production
# paths; --allow-placeholders downgrades placeholder hits to warnings (for
# validating the *.example.* templates). A value-taking flag given without
# a value previously made `shift 2` fail silently under `set -e`; it now
# reports a clear error.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --package)
      [[ $# -ge 2 ]] || { log_err "--package requires a value"; exit 1; }
      PACKAGE_PATH="$2"
      shift 2
      ;;
    --secrets)
      [[ $# -ge 2 ]] || { log_err "--secrets requires a value"; exit 1; }
      SECRETS_PATH="$2"
      shift 2
      ;;
    --allow-placeholders)
      ALLOW_PLACEHOLDERS=true
      shift
      ;;
    *)
      log_err "Unknown argument: $1"
      exit 1
      ;;
  esac
done
# Running totals mutated by the check_* helpers below.
ERRORS=0
WARNINGS=0
# require_file PATH — log success when PATH is a regular file, otherwise
# record a missing-file error.
require_file() {
  local path="$1"
  if [[ ! -f "$path" ]]; then
    log_err "Missing file: $path"
    ERRORS=$((ERRORS + 1))
    return
  fi
  log_ok "Found: $path"
}
# check_placeholder_string LABEL VALUE — error on empty values; placeholder
# values (<fill-me…>, CHANGEME, TODO) are warnings under
# --allow-placeholders, errors otherwise; anything else passes.
check_placeholder_string() {
  local label="$1" value="$2"
  if [[ -z "$value" ]]; then
    log_err "$label is empty"
    ERRORS=$((ERRORS + 1))
    return
  fi
  case "$value" in
    *"<fill-me"*|CHANGEME|TODO)
      if $ALLOW_PLACEHOLDERS; then
        log_warn "$label still contains a placeholder"
        WARNINGS=$((WARNINGS + 1))
      else
        log_err "$label still contains a placeholder"
        ERRORS=$((ERRORS + 1))
      fi
      ;;
    *)
      log_ok "$label is populated"
      ;;
  esac
}
# check_placeholder_string_maybe_partial LABEL VALUE — like
# check_placeholder_string, but while the package is awaiting external
# endorser data, empty/placeholder values are only warnings.
check_placeholder_string_maybe_partial() {
  local label="$1" value="$2"
  if ! $PARTIAL_EXTERNAL_ALLOWED; then
    check_placeholder_string "$label" "$value"
    return
  fi
  if [[ -z "$value" ]]; then
    log_warn "$label is empty while package is awaiting external endorser data"
    WARNINGS=$((WARNINGS + 1))
  elif [[ "$value" == *"<fill-me"* || "$value" == "CHANGEME" || "$value" == "TODO" ]]; then
    log_warn "$label still contains a placeholder while package is awaiting external endorser data"
    WARNINGS=$((WARNINGS + 1))
  else
    log_ok "$label is populated"
  fi
}
# check_indy_did_format LABEL VALUE
# Accepts a base58 (Bitcoin alphabet, no 0/O/I/l) string of 16-32 chars —
# the shape of an Indy DID identifier. Placeholder or empty values are
# warnings when placeholders are allowed or external endorser data is
# pending, errors otherwise. A malformed non-empty value is only a warning
# in the awaiting-external-endorser state.
check_indy_did_format() {
local label="$1"
local value="$2"
# Placeholder check runs first so a "<fill-me…>" value is not rejected
# for failing the base58 regex.
if [[ "$value" == *"<fill-me"* || "$value" == "CHANGEME" || "$value" == "TODO" ]]; then
if $ALLOW_PLACEHOLDERS || $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label still contains a placeholder"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label still contains a placeholder"
ERRORS=$((ERRORS + 1))
fi
return
fi
if [[ -z "$value" ]]; then
if $ALLOW_PLACEHOLDERS || $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label is empty while package is awaiting external endorser data"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label is empty"
ERRORS=$((ERRORS + 1))
fi
return
fi
if [[ "$value" =~ ^[1-9A-HJ-NP-Za-km-z]{16,32}$ ]]; then
log_ok "$label format looks like an Indy DID"
else
if $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label does not yet look like a valid Indy DID"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label does not look like a valid Indy DID"
ERRORS=$((ERRORS + 1))
fi
fi
}
# check_uuid_like LABEL VALUE
# Loose connection-id shape check: at least 16 hex/dash characters (covers
# UUIDs; deliberately not a strict UUID regex). Placeholder/empty handling
# mirrors check_indy_did_format.
check_uuid_like() {
local label="$1"
local value="$2"
if [[ "$value" == *"<fill-me"* || "$value" == "CHANGEME" || "$value" == "TODO" ]]; then
if $ALLOW_PLACEHOLDERS || $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label still contains a placeholder"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label still contains a placeholder"
ERRORS=$((ERRORS + 1))
fi
return
fi
if [[ -z "$value" ]]; then
if $ALLOW_PLACEHOLDERS || $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label is empty while package is awaiting external endorser data"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label is empty"
ERRORS=$((ERRORS + 1))
fi
return
fi
if [[ "$value" =~ ^[0-9a-fA-F-]{16,}$ ]]; then
log_ok "$label format looks connection-id compatible"
else
if $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "$label does not yet look like a valid connection identifier"
WARNINGS=$((WARNINGS + 1))
else
log_err "$label does not look like a valid connection identifier"
ERRORS=$((ERRORS + 1))
fi
fi
}
# check_change_control_ref VALUE — enforce the DBIS-ID-GOV-YYYY-NNN
# change-control reference format.
check_change_control_ref() {
  local value="$1"
  if [[ ! "$value" =~ ^DBIS-ID-GOV-[0-9]{4}-[0-9]{3}$ ]]; then
    log_err "governance.changeControlRef must match DBIS-ID-GOV-YYYY-NNN"
    ERRORS=$((ERRORS + 1))
    return
  fi
  log_ok "governance.changeControlRef format is valid"
}
# check_quorum_format VALUE — enforce the N-of-M quorum format AND basic
# sanity: 1 <= N <= M. Previously "0-of-5" or "3-of-2" passed the check.
# 10# forces base-10 arithmetic so leading zeros (e.g. "08-of-10") are not
# misread as invalid octal.
check_quorum_format() {
  local value="$1"
  if [[ "$value" =~ ^([0-9]+)-of-([0-9]+)$ ]]; then
    local required="${BASH_REMATCH[1]}" total="${BASH_REMATCH[2]}"
    if (( 10#$required >= 1 && 10#$required <= 10#$total )); then
      log_ok "governance.endorserGovernanceModel.quorum format is valid"
    else
      log_err "governance.endorserGovernanceModel.quorum N-of-M must satisfy 1 <= N <= M (got $value)"
      ERRORS=$((ERRORS + 1))
    fi
  else
    log_err "governance.endorserGovernanceModel.quorum must match N-of-M"
    ERRORS=$((ERRORS + 1))
  fi
}
# check_env_var LABEL NAME — indirect lookup of env var NAME; an unset or
# empty variable is a warning under --allow-placeholders and an error
# otherwise; non-empty values go through the placeholder check.
check_env_var() {
  local label="$1" name="$2"
  local value="${!name:-}"
  if [[ -n "$value" ]]; then
    check_placeholder_string "$label env var $name" "$value"
    return
  fi
  if $ALLOW_PLACEHOLDERS; then
    log_warn "$label env var $name is empty"
    WARNINGS=$((WARNINGS + 1))
  else
    log_err "$label env var $name is empty"
    ERRORS=$((ERRORS + 1))
  fi
}
require_file "$PACKAGE_PATH"
require_file "$SECRETS_PATH"
if ! command -v jq >/dev/null 2>&1; then
log_err "jq is required"
exit 1
fi
if [[ $ERRORS -gt 0 ]]; then
exit 1
fi
# Structural gate: every required field must exist with the expected JSON
# type before the per-field value checks below run.
if jq -e '
(.schemaVersion | type == "string") and
(.programId | type == "string") and
(.packageStatus | type == "string") and
(.ariesAgent.adminUrl | type == "string") and
(.ariesAgent.didcommUrl | type == "string") and
(.ariesAgent.walletType | type == "string") and
(.ariesAgent.adminAuthMode | type == "string") and
(.ariesAgent.adminApiKeyEnv | type == "string") and
(.ledger.type | type == "string") and
(.ledger.targetNetwork | type == "string") and
(.ledger.trustScope | type == "string") and
(.ledger.poolName | type == "string") and
(.ledger.genesisSource | type == "string") and
(.ledger.didMethod | type == "string") and
(.ledger.nymWriteMode | type == "string") and
(.governance.governanceVersion | type == "string") and
(.governance.changeControlRef | type == "string") and
(.governance.changeControlFormat | type == "string") and
(.governance.operatorOwner | type == "string") and
(.governance.approvalOwner | type == "string") and
(.governance.endorserGovernanceModel.type | type == "string") and
(.governance.endorserGovernanceModel.quorum | type == "string") and
(.governance.endorserGovernanceModel.custodians | type == "array") and
(.governance.endorserGovernanceModel.custodians | length >= 3) and
(.governance.endorserGovernanceModel.singleKeyDidControl | type == "string") and
(.governance.endorserGovernanceModel.currentPhase | type == "string") and
(.governance.endorserGovernanceModel.futurePhases | type == "array") and
(.governance.endorserGovernanceModel.futurePhases | length >= 1) and
(.roles.author.alias | type == "string") and
(.roles.author.connectionIdEnv | type == "string") and
(.roles.endorser.alias | type == "string") and
(.roles.endorser.did | type == "string") and
(.roles.endorser.connectionIdEnv | type == "string") and
(.anoncreds.schemas | type == "array") and
(.anoncreds.schemas | length >= 1) and
(.anoncreds.verificationProfiles | type == "array") and
(.anoncreds.verificationProfiles | length >= 1) and
(.evidence.outputDir | type == "string") and
(.evidence.requiredArtifacts | type == "array") and
(.evidence.requiredArtifacts | length >= 1)
' "$PACKAGE_PATH" >/dev/null; then
log_ok "Package JSON structure is valid"
else
log_err "Package JSON structure is invalid"
ERRORS=$((ERRORS + 1))
fi
# "awaiting-external-endorser" relaxes endorser-supplied fields to warnings
# in the maybe-partial checks below.
PACKAGE_STATUS="$(jq -r '.packageStatus' "$PACKAGE_PATH")"
if [[ "$PACKAGE_STATUS" == "awaiting-external-endorser" ]]; then
PARTIAL_EXTERNAL_ALLOWED=true
log_info "Package status allows external-governance gaps to remain warnings"
fi
check_placeholder_string "schemaVersion" "$(jq -r '.schemaVersion' "$PACKAGE_PATH")"
check_placeholder_string "programId" "$(jq -r '.programId' "$PACKAGE_PATH")"
check_placeholder_string "ariesAgent.adminUrl" "$(jq -r '.ariesAgent.adminUrl' "$PACKAGE_PATH")"
check_placeholder_string "ariesAgent.didcommUrl" "$(jq -r '.ariesAgent.didcommUrl' "$PACKAGE_PATH")"
check_placeholder_string "ariesAgent.adminAuthMode" "$(jq -r '.ariesAgent.adminAuthMode' "$PACKAGE_PATH")"
check_placeholder_string "ledger.targetNetwork" "$(jq -r '.ledger.targetNetwork' "$PACKAGE_PATH")"
check_placeholder_string "ledger.trustScope" "$(jq -r '.ledger.trustScope' "$PACKAGE_PATH")"
check_placeholder_string "ledger.poolName" "$(jq -r '.ledger.poolName' "$PACKAGE_PATH")"
check_placeholder_string "ledger.genesisSource" "$(jq -r '.ledger.genesisSource' "$PACKAGE_PATH")"
check_placeholder_string "ledger.didMethod" "$(jq -r '.ledger.didMethod' "$PACKAGE_PATH")"
check_placeholder_string "ledger.nymWriteMode" "$(jq -r '.ledger.nymWriteMode' "$PACKAGE_PATH")"
check_placeholder_string "governance.governanceVersion" "$(jq -r '.governance.governanceVersion' "$PACKAGE_PATH")"
CHANGE_CONTROL_REF="$(jq -r '.governance.changeControlRef' "$PACKAGE_PATH")"
check_placeholder_string "governance.changeControlRef" "$CHANGE_CONTROL_REF"
check_change_control_ref "$CHANGE_CONTROL_REF"
check_placeholder_string "governance.changeControlFormat" "$(jq -r '.governance.changeControlFormat' "$PACKAGE_PATH")"
check_placeholder_string "governance.operatorOwner" "$(jq -r '.governance.operatorOwner' "$PACKAGE_PATH")"
check_placeholder_string "governance.approvalOwner" "$(jq -r '.governance.approvalOwner' "$PACKAGE_PATH")"
check_placeholder_string "governance.endorserGovernanceModel.type" "$(jq -r '.governance.endorserGovernanceModel.type' "$PACKAGE_PATH")"
GOV_QUORUM="$(jq -r '.governance.endorserGovernanceModel.quorum' "$PACKAGE_PATH")"
check_placeholder_string "governance.endorserGovernanceModel.quorum" "$GOV_QUORUM"
check_quorum_format "$GOV_QUORUM"
check_placeholder_string "governance.endorserGovernanceModel.singleKeyDidControl" "$(jq -r '.governance.endorserGovernanceModel.singleKeyDidControl' "$PACKAGE_PATH")"
check_placeholder_string "governance.endorserGovernanceModel.currentPhase" "$(jq -r '.governance.endorserGovernanceModel.currentPhase' "$PACKAGE_PATH")"
if jq -e '(.governance.endorserGovernanceModel.custodians | type == "array") and (.governance.endorserGovernanceModel.custodians | length >= 3)' "$PACKAGE_PATH" >/dev/null; then
log_ok "governance.endorserGovernanceModel.custodians has at least 3 entries"
else
log_err "governance.endorserGovernanceModel.custodians must have at least 3 entries"
ERRORS=$((ERRORS + 1))
fi
if jq -e '(.governance.endorserGovernanceModel.futurePhases | type == "array") and (.governance.endorserGovernanceModel.futurePhases | length >= 1)' "$PACKAGE_PATH" >/dev/null; then
log_ok "governance.endorserGovernanceModel.futurePhases is populated"
else
log_err "governance.endorserGovernanceModel.futurePhases must contain at least one entry"
ERRORS=$((ERRORS + 1))
fi
check_placeholder_string "roles.author.alias" "$(jq -r '.roles.author.alias' "$PACKAGE_PATH")"
AUTHOR_PUBLIC_DID="$(jq -r '.roles.author.publicDid' "$PACKAGE_PATH")"
ENDORSER_DID="$(jq -r '.roles.endorser.did' "$PACKAGE_PATH")"
check_placeholder_string_maybe_partial "roles.author.publicDid" "$AUTHOR_PUBLIC_DID"
check_placeholder_string_maybe_partial "roles.author.verkey" "$(jq -r '.roles.author.verkey' "$PACKAGE_PATH")"
check_placeholder_string "roles.endorser.alias" "$(jq -r '.roles.endorser.alias' "$PACKAGE_PATH")"
check_placeholder_string_maybe_partial "roles.endorser.did" "$ENDORSER_DID"
check_placeholder_string "anoncreds.schemas[0].name" "$(jq -r '.anoncreds.schemas[0].name' "$PACKAGE_PATH")"
check_placeholder_string "anoncreds.schemas[0].version" "$(jq -r '.anoncreds.schemas[0].version' "$PACKAGE_PATH")"
if [[ -n "$AUTHOR_PUBLIC_DID" ]]; then
check_indy_did_format "roles.author.publicDid" "$AUTHOR_PUBLIC_DID"
fi
if [[ -n "$ENDORSER_DID" && "$ENDORSER_DID" != *"<fill-me"* ]]; then
check_indy_did_format "roles.endorser.did" "$ENDORSER_DID"
fi
GENESIS_SOURCE="$(jq -r '.ledger.genesisSource' "$PACKAGE_PATH")"
if [[ "$GENESIS_SOURCE" == /* ]]; then
if [[ -f "$GENESIS_SOURCE" ]]; then
log_ok "genesisSource file exists: $GENESIS_SOURCE"
else
log_warn "genesisSource file not present on this machine: $GENESIS_SOURCE"
WARNINGS=$((WARNINGS + 1))
fi
fi
# Export everything from the secrets env file so the indirect ${!name}
# expansions below can resolve connection ids and API keys.
set -a
source "$SECRETS_PATH"
set +a
# Env-var *names* come from the package; values come from the secrets file.
AUTHOR_ENV_NAME="$(jq -r '.roles.author.connectionIdEnv' "$PACKAGE_PATH")"
ENDORSER_ENV_NAME="$(jq -r '.roles.endorser.connectionIdEnv' "$PACKAGE_PATH")"
ADMIN_KEY_ENV_NAME="$(jq -r '.ariesAgent.adminApiKeyEnv' "$PACKAGE_PATH")"
ADMIN_AUTH_MODE="$(jq -r '.ariesAgent.adminAuthMode' "$PACKAGE_PATH")"
# Admin API key is only required when the agent authenticates with one.
if [[ "$ADMIN_AUTH_MODE" == "apiKey" ]]; then
check_env_var "Configured admin API key" "$ADMIN_KEY_ENV_NAME"
else
log_info "Skipping admin API key requirement because adminAuthMode=$ADMIN_AUTH_MODE"
fi
# Author connection: missing is always only a warning.
if [[ -n "${!AUTHOR_ENV_NAME:-}" ]]; then
check_env_var "Author connection" "$AUTHOR_ENV_NAME"
check_uuid_like "Author connection" "${!AUTHOR_ENV_NAME}"
else
log_warn "Author connection env var $AUTHOR_ENV_NAME is empty"
WARNINGS=$((WARNINGS + 1))
fi
# Endorser connection: missing is a warning only while awaiting the
# external endorser, otherwise an error.
if [[ -n "${!ENDORSER_ENV_NAME:-}" ]]; then
check_env_var "Endorser connection" "$ENDORSER_ENV_NAME"
check_uuid_like "Endorser connection" "${!ENDORSER_ENV_NAME}"
else
if $PARTIAL_EXTERNAL_ALLOWED; then
log_warn "Endorser connection env var $ENDORSER_ENV_NAME is empty while package is awaiting external endorser data"
WARNINGS=$((WARNINGS + 1))
else
log_err "Endorser connection env var $ENDORSER_ENV_NAME is empty"
ERRORS=$((ERRORS + 1))
fi
fi
# Summary: errors fail the run; warnings alone still pass.
if [[ $ERRORS -gt 0 ]]; then
log_err "Identity package validation failed with $ERRORS error(s) and $WARNINGS warning(s)"
exit 1
fi
if [[ $WARNINGS -gt 0 ]]; then
log_warn "Identity package validation passed with $WARNINGS warning(s)"
else
log_ok "Identity package validation passed"
fi

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Validate JSON syntax for DBIS institutional examples (no ajv required).
#
# Fix: the previous version iterated the raw glob, so a missing examples
# directory (or one with no *.json files) passed the literal string
# '.../*.json' to python3 and failed with a confusing traceback. Both cases
# now produce an explicit error message.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
EX="$ROOT/config/dbis-institutional/examples"
if [[ ! -d "$EX" ]]; then
  echo "error: missing examples directory: $EX" >&2
  exit 1
fi
# nullglob: an unmatched glob expands to nothing instead of itself.
shopt -s nullglob
files=("$EX"/*.json)
if [[ ${#files[@]} -eq 0 ]]; then
  echo "error: no JSON files found in $EX" >&2
  exit 1
fi
for f in "${files[@]}"; do
  python3 -m json.tool "$f" >/dev/null
  echo "OK $f"
done
echo "All institutional example JSON files parse."

View File

@@ -0,0 +1,64 @@
#!/usr/bin/env bash
# Validate dbis-institutional examples against JSON Schemas (draft 2020-12).
# Uses `check-jsonschema` when available (pip install check-jsonschema).
# In CI, install first: pip install check-jsonschema
#
# Env:
# SCHEMA_STRICT=1 exit 1 if check-jsonschema is missing (default: skip with 0)
#
set -euo pipefail
# Resolve repo root from this script's location (scripts/validation/ -> repo).
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SCHEMA_DIR="$ROOT/config/dbis-institutional/schemas"
EX_DIR="$ROOT/config/dbis-institutional/examples"
# validate_json_array EX_FILE SCHEMA_FILE LABEL
# Validate every element of the JSON array in EX_FILE against SCHEMA_FILE,
# one element at a time via a temp file. Missing EX_FILE is a silent skip
# (batch examples are optional); an empty array is an error.
validate_json_array() {
  local ex_file="$1" schema_file="$2" label="$3"
  if [[ ! -f "$ex_file" ]]; then
    return 0
  fi
  if ! command -v jq &>/dev/null; then
    echo "error: jq is required for $label validation" >&2
    exit 1
  fi
  local n
  n=$(jq 'length' "$ex_file")
  if [[ "${n:-0}" -lt 1 ]]; then
    echo "error: $ex_file must be a non-empty array" >&2
    exit 1
  fi
  local batch_tmp idx
  batch_tmp="$(mktemp)"
  # Trap EXIT as well as RETURN: under `set -e` a failing check-jsonschema
  # aborts the whole script, and a RETURN-only trap would leak the temp file.
  trap 'rm -f "$batch_tmp"' RETURN EXIT
  idx=0
  while IFS= read -r line; do
    # printf, not echo: element text may start with '-' or contain
    # backslash sequences that some echo implementations interpret.
    printf '%s\n' "$line" >"$batch_tmp"
    check-jsonschema --schemafile "$schema_file" "$batch_tmp"
    idx=$((idx + 1))
  done < <(jq -c '.[]' "$ex_file")
  echo "OK $label ($idx items)"
  rm -f "$batch_tmp"
  trap - RETURN EXIT
}
# Tool gate: without check-jsonschema we either hard-fail (SCHEMA_STRICT=1)
# or skip gracefully, since plain JSON parsing is covered by the companion
# validate-dbis-institutional-json.sh script.
if ! command -v check-jsonschema &>/dev/null; then
if [[ "${SCHEMA_STRICT:-0}" == "1" ]]; then
echo "error: check-jsonschema not found; pip install check-jsonschema" >&2
exit 1
fi
echo "skip: check-jsonschema not installed (pip install check-jsonschema); JSON parse still covered by validate-dbis-institutional-json.sh"
exit 0
fi
# Single-document examples validate directly; batch (array) examples go
# through validate_json_array so each element is checked individually.
check-jsonschema --schemafile "$SCHEMA_DIR/settlement-event.schema.json" "$EX_DIR/settlement-event.example.json"
check-jsonschema --schemafile "$SCHEMA_DIR/settlement-event.schema.json" "$EX_DIR/settlement-event.chain138-primary.example.json"
check-jsonschema --schemafile "$SCHEMA_DIR/settlement-event.schema.json" "$EX_DIR/settlement-event.min.json"
validate_json_array "$EX_DIR/settlement-events-batch.example.json" "$SCHEMA_DIR/settlement-event.schema.json" "settlement-events-batch"
check-jsonschema --schemafile "$SCHEMA_DIR/address-registry-entry.schema.json" "$EX_DIR/address-registry-entry.example.json"
validate_json_array "$EX_DIR/address-registry-entries-batch.example.json" "$SCHEMA_DIR/address-registry-entry.schema.json" "address-registry-entries-batch"
check-jsonschema --schemafile "$SCHEMA_DIR/trust.schema.json" "$EX_DIR/trust.json"
check-jsonschema --schemafile "$SCHEMA_DIR/governance.schema.json" "$EX_DIR/governance.json"
check-jsonschema --schemafile "$SCHEMA_DIR/policy-manifest.schema.json" "$EX_DIR/policy.json"
echo "OK dbis-institutional schema validation (settlement-event, settlement-event.chain138-primary, settlement-events-batch, address-registry-entry, address-registry-entries-batch, trust, governance, policy-manifest)"

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env bash
# Compare explorer-monorepo Chain 138 keys in address-inventory.json to
# config/smart-contracts-master.json (G3 drift guard).
# Usage: bash scripts/validation/validate-explorer-chain138-inventory.sh
# Requires: jq
set -euo pipefail
# Resolve repo root relative to this script (scripts/validation/ -> repo).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Canonical source of truth vs. the explorer's derived inventory.
MASTER="${PROJECT_ROOT}/config/smart-contracts-master.json"
INV="${PROJECT_ROOT}/explorer-monorepo/config/address-inventory.json"
# norm STRING — lowercase STRING (hex addresses compare case-insensitively).
norm() { tr '[:upper:]' '[:lower:]' <<<"$1"; }
# jq is a soft requirement: without it this check is skipped, not failed.
if ! command -v jq &>/dev/null; then
echo "[WARN] jq not installed; skip explorer Chain 138 inventory alignment check"
exit 0
fi
# The master registry is mandatory; the explorer inventory is optional
# (its absence skips the check instead of failing it).
if [[ ! -f "$MASTER" ]]; then
echo "[ERROR] Missing $MASTER"
exit 1
fi
if [[ ! -f "$INV" ]]; then
echo "[WARN] Missing $INV; skip explorer inventory check"
exit 0
fi
# Running error count; incremented by expect_match, checked at the end.
ERR=0
# expect_match KEY JQPATH
# Compare the inventory value for KEY against the master value at JQPATH,
# case-insensitively (hex addresses may differ in checksum casing).
# Increments ERR on any discrepancy instead of exiting, so all keys report.
expect_match() {
  local key="$1"
  local jqpath="$2"
  local exp
  exp=$(jq -r "$jqpath" "$MASTER")
  # Guard: if the master lacks this path, `jq -r` emits the literal string
  # "null", which previously surfaced as a confusing "master=null" mismatch.
  if [[ -z "$exp" || "$exp" == "null" ]]; then
    echo "[ERROR] master missing value at $jqpath (needed for $key)"
    ERR=$((ERR + 1))
    return
  fi
  local got
  got=$(jq -r --arg k "$key" '.inventory[$k] // empty' "$INV")
  if [[ -z "$got" ]]; then
    echo "[ERROR] inventory missing key: $key"
    ERR=$((ERR + 1))
    return
  fi
  if [[ "$(norm "$exp")" != "$(norm "$got")" ]]; then
    echo "[ERROR] $key mismatch: inventory=$got master=$exp"
    ERR=$((ERR + 1))
  fi
}
# Each Chain 138 contract is published under both a chain-suffixed and a
# generic inventory key; both must match the master registry.
expect_match "CCIP_ROUTER_138" '.chains["138"].contracts.CCIP_Router'
expect_match "CCIP_ROUTER_ADDRESS" '.chains["138"].contracts.CCIP_Router'
expect_match "CCIPWETH9_BRIDGE_138" '.chains["138"].contracts.CCIPWETH9_Bridge'
expect_match "CCIPWETH9_BRIDGE" '.chains["138"].contracts.CCIPWETH9_Bridge'
expect_match "LINK_TOKEN_138" '.chains["138"].contracts.LINK'
expect_match "ISO20022_ROUTER" '.chains["138"].contracts.ISO20022Router'
# Fail only after reporting every drifted key.
if [[ $ERR -gt 0 ]]; then
echo "[ERROR] Explorer address-inventory Chain 138 drift ($ERR). Update explorer-monorepo/config/address-inventory.json or smart-contracts-master.json."
exit 1
fi
echo "[OK] Explorer address-inventory Chain 138 keys match smart-contracts-master.json"

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env bash
# Validate JVMTM / regulatory closure example JSON against local schemas (draft 2020-12).
# Uses check-jsonschema when available; SCHEMA_STRICT=1 fails if missing.
set -euo pipefail
# Resolve repo root relative to this script (scripts/validation/ -> repo).
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
BASE="$ROOT/config/jvmtm-regulatory-closure"
SCHEMA="$BASE/schemas"
EX="$BASE/examples"
# Record tool availability instead of exiting: the Python transaction-grade
# pack check below still runs even when schema validation must be skipped.
HAVE_CHECK_JSONSCHEMA=1
if ! command -v check-jsonschema &>/dev/null; then
HAVE_CHECK_JSONSCHEMA=0
if [[ "${SCHEMA_STRICT:-0}" == "1" ]]; then
echo "error: check-jsonschema not found; pip install check-jsonschema" >&2
exit 1
fi
echo "skip: check-jsonschema not installed (pip install check-jsonschema)"
fi
# validate_pair SCHEMA_FILE EXAMPLE_FILE
# Check one example document against its JSON Schema (exits nonzero on failure
# because the script runs under `set -e`).
validate_pair() {
  local schema="$1"
  local example="$2"
  check-jsonschema --schemafile "$schema" "$example"
}
# Schema pass: 11 example/schema pairs, only when the tool is available.
if [[ "$HAVE_CHECK_JSONSCHEMA" == "1" ]]; then
validate_pair "$SCHEMA/daily-3way-reconciliation-report.schema.json" "$EX/daily-3way-reconciliation-report.example.json"
validate_pair "$SCHEMA/three-way-reconciliation-result.schema.json" "$EX/three-way-reconciliation-result.example.json"
validate_pair "$SCHEMA/prefunding-proof.schema.json" "$EX/prefunding-proof.example.json"
validate_pair "$SCHEMA/pre-settlement-ack.schema.json" "$EX/pre-settlement-ack.example.json"
validate_pair "$SCHEMA/sample-exception-event.schema.json" "$EX/sample-exception-event.example.json"
validate_pair "$SCHEMA/kyt-screening-result.schema.json" "$EX/kyt-screening-result.example.json"
validate_pair "$SCHEMA/recovery-time-report.schema.json" "$EX/recovery-time-report.example.json"
validate_pair "$SCHEMA/dr-simulation-report.schema.json" "$EX/dr-simulation-report.example.json"
validate_pair "$SCHEMA/real-time-balance-snapshot.schema.json" "$EX/real-time-balance-snapshot.example.json"
validate_pair "$SCHEMA/transaction-compliance-execution.schema.json" "$EX/transaction-compliance-execution.example.json"
validate_pair "$SCHEMA/transaction-compliance-execution.schema.json" "$EX/transaction-compliance-execution.blocked.example.json"
fi
# The transaction-grade pack validator is mandatory and needs python3.
if ! command -v python3 &>/dev/null; then
echo "error: python3 not found; required for JVMTM transaction-grade pack validation" >&2
exit 1
fi
python3 "$ROOT/scripts/validation/validate-jvmtm-transaction-compliance-pack.py"
# Summary line distinguishes full validation from the schema-skipped path.
if [[ "$HAVE_CHECK_JSONSCHEMA" == "1" ]]; then
echo "OK jvmtm-regulatory-closure schema validation (11 example/schema pairs + transaction-grade pack checks)"
else
echo "OK jvmtm-regulatory-closure transaction-grade pack validation (schema checks skipped: check-jsonschema not installed)"
fi

View File

@@ -0,0 +1,341 @@
#!/usr/bin/env python3
"""Validate the JVMTM transaction-grade compliance pack."""
from __future__ import annotations

import csv
import io
import json
import sys
from pathlib import Path
from typing import NoReturn
# Closed vocabularies used throughout the pack validator.
RAIL_MODES = {"chain138-primary", "swift", "hybrid", "internal-only"}
BLOCKING_LEVELS = {"HARD_STOP", "ESCALATE", "POST_EVENT"}
DECISION_STATUSES = {"READY", "BLOCKED", "ESCALATE"}
CONTROL_STATUSES = {"PASS", "FAIL", "PENDING", "WAIVED"}
EVIDENCE_REF_TYPES = {"repo-path", "runtime-slot", "archive-path", "external-ref"}
# Fields every control entry in transaction-compliance-matrix.json must carry.
REQUIRED_CONTROL_FIELDS = [
    "control_id",
    "phase",
    "domain",
    "requirement",
    "validation_method",
    "blocking_level",
    "applies_to_rail",
    "source_audit_rows",
    "repo_evidence_artifacts",
    "validator_command",
    "failure_action",
    "high_value_override",
    "notes",
]
# The CSV export uses exactly the control fields, in the same order. Derive the
# header from REQUIRED_CONTROL_FIELDS (previously a byte-for-byte duplicate
# list) so the two can never drift apart.
CSV_FIELDNAMES = list(REQUIRED_CONTROL_FIELDS)
def fail(message: str) -> NoReturn:
    """Abort validation with exit status 1 and an ``error: ...`` message.

    Annotated ``NoReturn`` (it always raises ``SystemExit``) so type checkers
    understand that callers such as ``load_json`` never fall through after
    calling it.
    """
    raise SystemExit(f"error: {message}")
def load_json(path: Path) -> dict:
    """Read *path* as UTF-8 JSON, aborting via fail() on a missing or invalid file."""
    try:
        raw = path.read_text(encoding="utf-8")
    except FileNotFoundError:
        fail(f"missing JSON file: {path}")
    try:
        return json.loads(raw)
    except json.JSONDecodeError as exc:
        fail(f"invalid JSON in {path}: {exc}")
def format_artifacts(artifacts: list[dict[str, str]]) -> str:
    """Render evidence artifacts as ``type:ref`` pairs joined by ``' | '`` for one CSV cell."""
    rendered = []
    for artifact in artifacts:
        rendered.append(f"{artifact['artifact_type']}:{artifact['ref']}")
    return " | ".join(rendered)
def render_csv(matrix: dict) -> str:
    """Serialize ``matrix['controls']`` to the canonical CSV text.

    Uses LF line endings regardless of platform so the output can be compared
    byte-for-byte against the committed CSV export.
    """
    out = io.StringIO(newline="")
    writer = csv.DictWriter(out, fieldnames=CSV_FIELDNAMES, lineterminator="\n")
    writer.writeheader()
    for control in matrix["controls"]:
        # Start from the control's scalar fields, then flatten the three
        # list/object-valued fields into their CSV cell representations.
        row = {name: control[name] for name in CSV_FIELDNAMES}
        row["applies_to_rail"] = " | ".join(control["applies_to_rail"])
        row["source_audit_rows"] = " | ".join(control["source_audit_rows"])
        row["repo_evidence_artifacts"] = format_artifacts(control["repo_evidence_artifacts"])
        writer.writerow(row)
    return out.getvalue()
def validate_evidence_ref(ref: dict, label: str) -> None:
    """Check the shape of a single evidence reference object.

    Requires non-empty string ``artifact_type`` (from EVIDENCE_REF_TYPES) and
    ``ref``; an optional ``sha256`` must be 64 hex characters. Aborts via
    fail() on the first violation.
    """
    if not isinstance(ref, dict):
        fail(f"{label} must be an object")
    for key in ("artifact_type", "ref"):
        value = ref.get(key)
        if not isinstance(value, str) or not value.strip():
            fail(f"{label} missing non-empty {key}")
    if ref["artifact_type"] not in EVIDENCE_REF_TYPES:
        fail(f"{label} uses unsupported artifact_type {ref['artifact_type']}")
    if "sha256" in ref:
        digest = ref["sha256"]
        hex_chars = set("0123456789abcdefABCDEF")
        if not isinstance(digest, str) or len(digest) != 64 or not set(digest) <= hex_chars:
            fail(f"{label} sha256 must be a 64-character hex string")
def validate_pack_reference(ref: dict, label: str, repo_root: Path, slot_refs: set[str]) -> None:
    """Validate an evidence reference and, for local types, that its target exists.

    ``repo-path`` refs must exist under *repo_root*; ``runtime-slot`` refs must
    name a slot declared in the matrix. Other artifact types (``archive-path``,
    ``external-ref``) pass through after the shape check.
    """
    validate_evidence_ref(ref, label)
    kind = ref["artifact_type"]
    if kind == "repo-path" and not (repo_root / ref["ref"]).exists():
        fail(f"{label} repo-path does not exist: {ref['ref']}")
    if kind == "runtime-slot" and ref["ref"] not in slot_refs:
        fail(f"{label} runtime-slot does not exist in the matrix: {ref['ref']}")
def validate_execution_example(
    path: Path,
    control_ids: set[str],
    expected_status: str,
    matrix_version: str,
    repo_root: Path,
    slot_refs: set[str],
) -> None:
    """Validate one transaction-compliance execution example JSON file.

    Checks, in order: required top-level fields; decision/rail enums; the
    example is pinned to the canonical *matrix_version*; pack references
    resolve; control_results reference known, non-repeated controls with
    valid statuses and evidence; then status-specific rules for READY vs
    BLOCKED examples. Aborts via fail() on the first violation.
    """
    payload = load_json(path)
    # Every execution record must carry these top-level fields.
    required_top_level = [
        "schema_version",
        "matrix_version",
        "transaction_id",
        "correlation_id",
        "rail_mode",
        "amount",
        "currency",
        "decision_status",
        "decision_reason",
        "validated_at",
        "approved_by",
        "instruction_ref",
        "control_results",
    ]
    for field in required_top_level:
        if field not in payload:
            fail(f"{path} missing required field {field}")
    if payload["decision_status"] not in DECISION_STATUSES:
        fail(f"{path} uses unsupported decision_status {payload['decision_status']}")
    if payload["rail_mode"] not in RAIL_MODES:
        fail(f"{path} uses unsupported rail_mode {payload['rail_mode']}")
    if payload["decision_status"] != expected_status:
        fail(f"{path} decision_status expected {expected_status} but found {payload['decision_status']}")
    # Examples must be pinned to the canonical matrix version.
    if payload["matrix_version"] != matrix_version:
        fail(f"{path} matrix_version {payload['matrix_version']} does not match canonical matrix_version {matrix_version}")
    validate_pack_reference(payload["instruction_ref"], f"{path}:instruction_ref", repo_root, slot_refs)
    # settlement_event_ref is optional in general; the READY/BLOCKED rules
    # below decide whether it must be present or absent.
    if "settlement_event_ref" in payload:
        validate_pack_reference(payload["settlement_event_ref"], f"{path}:settlement_event_ref", repo_root, slot_refs)
    if not isinstance(payload["control_results"], list) or not payload["control_results"]:
        fail(f"{path} control_results must be a non-empty array")
    # Each control may appear at most once and must reference a known id.
    seen = set()
    for index, result in enumerate(payload["control_results"]):
        label = f"{path}:control_results[{index}]"
        if not isinstance(result, dict):
            fail(f"{label} must be an object")
        for key in ("control_id", "status", "blocking", "validated_at", "validator_ref", "evidence_refs"):
            if key not in result:
                fail(f"{label} missing required field {key}")
        control_id = result["control_id"]
        if control_id not in control_ids:
            fail(f"{label} references unknown control_id {control_id}")
        if control_id in seen:
            fail(f"{path} repeats control_id {control_id}")
        seen.add(control_id)
        if result["status"] not in CONTROL_STATUSES:
            fail(f"{label} uses unsupported status {result['status']}")
        if result["blocking"] not in BLOCKING_LEVELS:
            fail(f"{label} uses unsupported blocking value {result['blocking']}")
        if not isinstance(result["evidence_refs"], list) or not result["evidence_refs"]:
            fail(f"{label} evidence_refs must be a non-empty array")
        for ref_index, evidence_ref in enumerate(result["evidence_refs"]):
            validate_pack_reference(evidence_ref, f"{label}:evidence_refs[{ref_index}]", repo_root, slot_refs)
    # READY examples must include a settlement event and passing gate controls.
    if expected_status == "READY":
        if "settlement_event_ref" not in payload:
            fail(f"{path} must include settlement_event_ref for the READY example")
        statuses = {result["control_id"]: result["status"] for result in payload["control_results"]}
        if statuses.get("PT-02") != "PASS" or statuses.get("TX-02") != "PASS":
            fail(f"{path} must show PT-02 and TX-02 as PASS for READY examples")
    # BLOCKED examples are pre-execution: no settlement event, PT-02 failed.
    if expected_status == "BLOCKED":
        if "settlement_event_ref" in payload:
            fail(f"{path} should omit settlement_event_ref for the BLOCKED pre-execution example")
        statuses = {result["control_id"]: result["status"] for result in payload["control_results"]}
        if statuses.get("PT-02") != "FAIL":
            fail(f"{path} must show PT-02 as FAIL for BLOCKED examples")
        if statuses.get("TX-02") not in {"FAIL", "PENDING"}:
            fail(f"{path} must show TX-02 as FAIL or PENDING for BLOCKED examples")
def main() -> int:
    """Validate the JVMTM transaction-grade compliance pack.

    Pipeline, in order: all pack files exist; the canonical JSON matrix is
    well-formed (versioning, source baselines, runtime slots, controls with
    evidence artifacts); the committed CSV export is byte-identical to what
    the matrix renders; the markdown summary mentions the matrix title and
    every control id; and the READY / BLOCKED execution examples are
    internally consistent. Aborts via fail() -> SystemExit(1) on the first
    violation; returns 0 on success.
    """
    repo_root = Path(__file__).resolve().parents[2]
    config_dir = repo_root / "config/jvmtm-regulatory-closure"
    matrix_path = config_dir / "transaction-compliance-matrix.json"
    csv_path = config_dir / "transaction-compliance-matrix.csv"
    markdown_path = config_dir / "JVMTM_TRANSACTION_GRADE_COMPLIANCE_MATRIX.md"
    schema_path = config_dir / "schemas/transaction-compliance-execution.schema.json"
    ready_example_path = config_dir / "examples/transaction-compliance-execution.example.json"
    blocked_example_path = config_dir / "examples/transaction-compliance-execution.blocked.example.json"
    # Presence gate: every pack artifact must exist before deeper checks.
    for path in (matrix_path, csv_path, markdown_path, schema_path, ready_example_path, blocked_example_path):
        if not path.exists():
            fail(f"missing required pack file: {path}")
    matrix = load_json(matrix_path)
    # Top-level matrix invariants (versioning and canonical-format contract).
    if matrix.get("schema_version") != 1:
        fail(f"{matrix_path} schema_version must equal 1")
    if not isinstance(matrix.get("matrix_version"), str) or not matrix["matrix_version"]:
        fail(f"{matrix_path} matrix_version must be a non-empty string")
    if not isinstance(matrix.get("runtime_slots"), list) or not matrix["runtime_slots"]:
        fail(f"{matrix_path} runtime_slots must be a non-empty array")
    if not isinstance(matrix.get("controls"), list) or not matrix["controls"]:
        fail(f"{matrix_path} controls must be a non-empty array")
    if matrix.get("canonical_format") != "json":
        fail(f"{matrix_path} canonical_format must equal 'json'")
    if matrix.get("csv_export") != "config/jvmtm-regulatory-closure/transaction-compliance-matrix.csv":
        fail(f"{matrix_path} csv_export must point to the canonical CSV path")
    # source_baseline entries are repo-relative paths that must all exist.
    if not isinstance(matrix.get("source_baseline"), list) or not matrix["source_baseline"]:
        fail(f"{matrix_path} source_baseline must be a non-empty array")
    for baseline_ref in matrix["source_baseline"]:
        if not isinstance(baseline_ref, str) or not baseline_ref.strip():
            fail(f"{matrix_path} contains an invalid source_baseline entry")
        if not (repo_root / baseline_ref).exists():
            fail(f"{matrix_path} source_baseline path does not exist: {baseline_ref}")
    # Collect unique runtime slot names; referenced by runtime-slot artifacts.
    slot_refs: set[str] = set()
    for index, slot in enumerate(matrix["runtime_slots"]):
        if not isinstance(slot, dict):
            fail(f"{matrix_path} runtime_slots[{index}] must be an object")
        for key in ("slot", "source", "archive_path", "description"):
            if key not in slot or not isinstance(slot[key], str) or not slot[key].strip():
                fail(f"{matrix_path} runtime_slots[{index}] missing non-empty {key}")
        if slot["slot"] in slot_refs:
            fail(f"{matrix_path} repeats runtime slot {slot['slot']}")
        slot_refs.add(slot["slot"])
    # Validate each control row: unique id, enum fields, evidence artifacts.
    control_ids: set[str] = set()
    for index, control in enumerate(matrix["controls"]):
        label = f"{matrix_path}:controls[{index}]"
        if not isinstance(control, dict):
            fail(f"{label} must be an object")
        for field in REQUIRED_CONTROL_FIELDS:
            if field not in control:
                fail(f"{label} missing field {field}")
        control_id = control["control_id"]
        if not isinstance(control_id, str) or not control_id.strip():
            fail(f"{label} control_id must be a non-empty string")
        if control_id in control_ids:
            fail(f"{matrix_path} repeats control_id {control_id}")
        control_ids.add(control_id)
        if control["blocking_level"] not in BLOCKING_LEVELS:
            fail(f"{label} uses unsupported blocking_level {control['blocking_level']}")
        if not isinstance(control["applies_to_rail"], list) or not control["applies_to_rail"]:
            fail(f"{label} applies_to_rail must be a non-empty array")
        if any(rail not in RAIL_MODES for rail in control["applies_to_rail"]):
            fail(f"{label} uses unsupported rail mode")
        if not isinstance(control["source_audit_rows"], list) or not control["source_audit_rows"]:
            fail(f"{label} source_audit_rows must be a non-empty array")
        artifacts = control["repo_evidence_artifacts"]
        if not isinstance(artifacts, list) or not artifacts:
            fail(f"{label} repo_evidence_artifacts must be a non-empty array")
        # Matrix artifacts only allow repo-path / runtime-slot (stricter than
        # the execution-example evidence refs, which also allow archive/external).
        for artifact_index, artifact in enumerate(artifacts):
            if not isinstance(artifact, dict):
                fail(f"{label}:repo_evidence_artifacts[{artifact_index}] must be an object")
            for key in ("artifact_type", "ref"):
                if key not in artifact or not isinstance(artifact[key], str) or not artifact[key].strip():
                    fail(f"{label}:repo_evidence_artifacts[{artifact_index}] missing non-empty {key}")
            artifact_type = artifact["artifact_type"]
            ref = artifact["ref"]
            if artifact_type == "repo-path":
                if not (repo_root / ref).exists():
                    fail(f"{label}:repo_evidence_artifacts[{artifact_index}] repo-path does not exist: {ref}")
            elif artifact_type == "runtime-slot":
                if ref not in slot_refs:
                    fail(f"{label}:repo_evidence_artifacts[{artifact_index}] unknown runtime slot: {ref}")
            else:
                fail(f"{label}:repo_evidence_artifacts[{artifact_index}] unsupported artifact_type {artifact_type}")
    # CSV export must be byte-identical to what the JSON matrix renders.
    expected_csv = render_csv(matrix)
    actual_csv = csv_path.read_text(encoding="utf-8")
    if actual_csv != expected_csv:
        fail(
            "transaction-compliance-matrix.csv is out of date with transaction-compliance-matrix.json; "
            "run scripts/jvmtm/export-transaction-compliance-matrix-csv.py"
        )
    actual_rows = [line for line in actual_csv.splitlines() if line.strip()]
    expected_row_count = len(matrix["controls"]) + 1
    if len(actual_rows) != expected_row_count:
        fail(
            f"{csv_path} row count mismatch: expected {expected_row_count} including header, "
            f"found {len(actual_rows)}"
        )
    # Markdown summary must mention the canonical title and every control id.
    markdown_text = markdown_path.read_text(encoding="utf-8")
    # NOTE(review): "title" is read with [] rather than .get(); a matrix
    # missing "title" raises KeyError here instead of a fail() message —
    # confirm upstream tooling guarantees the field.
    if matrix["title"] not in markdown_text:
        fail(f"{markdown_path} does not contain the canonical matrix title: {matrix['title']}")
    missing_markdown_controls = [control_id for control_id in control_ids if control_id not in markdown_text]
    if missing_markdown_controls:
        fail(
            f"{markdown_path} is missing control ids present in the canonical matrix: "
            f"{', '.join(sorted(missing_markdown_controls))}"
        )
    # Finally validate both execution examples against the matrix just loaded.
    validate_execution_example(
        ready_example_path,
        control_ids,
        "READY",
        matrix["matrix_version"],
        repo_root,
        slot_refs,
    )
    validate_execution_example(
        blocked_example_path,
        control_ids,
        "BLOCKED",
        matrix["matrix_version"],
        repo_root,
        slot_refs,
    )
    print(
        "OK jvmtm transaction-grade compliance pack "
        f"({len(control_ids)} controls, {len(slot_refs)} runtime slots, CSV synchronized)"
    )
    return 0
if __name__ == "__main__":
    # sys.exit raises SystemExit with main()'s return code, same as raising it directly.
    sys.exit(main())

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
# Validate 3FR reserve provenance package JSON files against schemas/reserve-provenance-package.schema.json
#
# Behavior:
#   - With check-jsonschema installed: validate every package JSON (excluding
#     *.example.json) against the package schema.
#   - Without it: fall back to a plain JSON-parse check, unless SCHEMA_STRICT=1,
#     in which case the missing tool is a hard error.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
PKG="$ROOT/config/reserve-provenance-package"
SCHEMA="$PKG/schemas/reserve-provenance-package.schema.json"

# Build the payload file list once. Previously this glob+filter list was
# duplicated verbatim in both branches below, inviting drift when a new
# package subdirectory is added.
PACKAGE_FILES=()
for f in "$PKG"/legal/*.json "$PKG"/settlement/*.json "$PKG"/provenance/*.json \
  "$PKG"/bank/*.json "$PKG"/kyt/*.json "$PKG"/reconciliation/*.json \
  "$PKG"/reserve/*.json "$PKG"/governance/*.json; do
  [[ -f "$f" ]] || continue                 # skip unmatched glob literals
  [[ "$f" == *.example.json ]] && continue  # examples are validated elsewhere
  PACKAGE_FILES+=("$f")
done

if ! command -v check-jsonschema &>/dev/null; then
  if [[ "${SCHEMA_STRICT:-0}" == "1" ]]; then
    echo "error: check-jsonschema not found; pip install check-jsonschema" >&2
    exit 1
  fi
  # Best-effort fallback: syntax-only check with the stdlib JSON parser.
  for f in "${PACKAGE_FILES[@]}"; do
    python3 -m json.tool "$f" >/dev/null
    echo "OK parse $f"
  done
  echo "skip: check-jsonschema not installed (JSON parse only)"
  exit 0
fi
for f in "${PACKAGE_FILES[@]}"; do
  check-jsonschema --schemafile "$SCHEMA" "$f"
done
echo "OK reserve-provenance-package (10 JSON files + schema)"