Sync workspace: config, docs, scripts, CI, operator rules, and submodule pointers.

- Update dbis_core, cross-chain-pmm-lps, explorer-monorepo, metamask-integration, pr-workspace/chains
- Omit embedded publish git dirs and empty placeholders from index

Made-with: Cursor
This commit is contained in:
defiQUG
2026-04-12 06:12:20 -07:00
parent 6fb6bd3993
commit dbd517b279
2935 changed files with 327972 additions and 5533 deletions

View File

@@ -104,6 +104,9 @@ else
and (.approvedBridgePeers | type == "array")
and (.transportPairs | type == "array")
and (.publicPools | type == "array")
and ((.gasAssetFamilies == null) or (.gasAssetFamilies | type == "array"))
and ((.gasRedeemGroups == null) or (.gasRedeemGroups | type == "array"))
and ((.gasProtocolExposure == null) or (.gasProtocolExposure | type == "array"))
' "$PROJECT_ROOT/config/gru-transport-active.json" &>/dev/null; then
log_ok "gru-transport-active.json: top-level overlay structure is valid"
else
@@ -144,6 +147,7 @@ const multichain = readJson('config/token-mapping-multichain.json');
const deployment = readJson('cross-chain-pmm-lps/config/deployment-status.json');
const poolMatrix = readJson('cross-chain-pmm-lps/config/pool-matrix.json');
const currencyManifest = readJson('config/gru-iso4217-currency-manifest.json');
const monetaryUnitManifest = readJson('config/gru-monetary-unit-manifest.json');
const errors = [];
@@ -159,6 +163,15 @@ const reserveVerifiers = active.reserveVerifiers && typeof active.reserveVerifie
const transportPairsByKey = new Map((active.transportPairs || []).map((pair) => [String(pair.key), pair]));
const publicPoolsByKey = new Map((active.publicPools || []).map((pool) => [String(pool.key), pool]));
const manifestByCode = new Map((currencyManifest.currencies || []).map((currency) => [String(currency.code), currency]));
const monetaryUnitsByCode = new Map((monetaryUnitManifest.monetaryUnits || []).map((unit) => [String(unit.code), unit]));
const gasFamiliesByKey = new Map(((active.gasAssetFamilies || [])).map((family) => [String(family.familyKey), family]));
const gasFamiliesBySymbol = new Map(
(active.gasAssetFamilies || []).flatMap((family) => [
[String(family.canonicalSymbol138), family],
[String(family.mirroredSymbol), family],
])
);
const gasProtocolExposureByKey = new Map((active.gasProtocolExposure || []).map((row) => [String(row.key), row]));
function getMappingPair(fromChainId, toChainId) {
return (multichain.pairs || []).find(
@@ -186,6 +199,30 @@ for (const chain of active.enabledDestinationChains || []) {
}
for (const token of enabledCanonicalTokens) {
const registryFamily = String(token.registryFamily || '').trim();
if (registryFamily === 'gas_native') {
const gasFamily = gasFamiliesByKey.get(String(token.familyKey || '')) || gasFamiliesBySymbol.get(String(token.symbol || ''));
if (!gasFamily) {
errors.push(`enabledCanonicalTokens[${token.symbol}] references missing gas family ${token.familyKey}`);
continue;
}
if (String(gasFamily.canonicalSymbol138 || '') !== String(token.symbol || '')) {
errors.push(`enabledCanonicalTokens[${token.symbol}] must match gasAssetFamilies canonicalSymbol138`);
}
continue;
}
if (registryFamily === 'monetary_unit') {
const monetaryUnit = monetaryUnitsByCode.get(String(token.currencyCode || ''));
if (!monetaryUnit) {
errors.push(`enabledCanonicalTokens[${token.symbol}] references missing monetary unit ${token.currencyCode} in gru-monetary-unit-manifest.json`);
continue;
}
if (String(monetaryUnit.canonicalSymbol || '') !== String(token.symbol || '')) {
errors.push(`enabledCanonicalTokens[${token.symbol}] must match gru-monetary-unit-manifest.json canonicalSymbol`);
}
continue;
}
const currency = manifestByCode.get(String(token.currencyCode || ''));
if (!currency) {
errors.push(`enabledCanonicalTokens[${token.symbol}] references missing currencyCode ${token.currencyCode} in gru-iso4217-currency-manifest.json`);
@@ -243,13 +280,40 @@ for (const pair of active.transportPairs || []) {
}
const deploymentChain = deployment.chains?.[String(destinationChainId)];
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol] || deploymentChain?.gasMirrors?.[mirroredSymbol];
if (!deploymentChain || !isNonZeroAddress(deployedMirror)) {
errors.push(`transportPairs[${pair.key}] mapping exists but deployment-status.json has no deployed ${mirroredSymbol} for chain ${destinationChainId}`);
} else if (mappingToken && normalizeAddress(deployedMirror) !== normalizeAddress(mappingToken.addressTo)) {
errors.push(`transportPairs[${pair.key}] deployment-status.json ${mirroredSymbol} does not match token-mapping-multichain.json addressTo`);
}
if (pair.assetClass === 'gas_native') {
const gasFamily =
gasFamiliesByKey.get(String(pair.familyKey || '')) ||
gasFamiliesBySymbol.get(canonicalSymbol) ||
gasFamiliesBySymbol.get(mirroredSymbol);
if (!gasFamily) {
errors.push(`transportPairs[${pair.key}] references missing gas family ${pair.familyKey}`);
} else {
if (String(gasFamily.canonicalSymbol138 || '') !== canonicalSymbol) {
errors.push(`transportPairs[${pair.key}] canonicalSymbol must match gas family canonicalSymbol138`);
}
if (String(gasFamily.mirroredSymbol || '') !== mirroredSymbol) {
errors.push(`transportPairs[${pair.key}] mirroredSymbol must match gas family mirroredSymbol`);
}
if (String(gasFamily.backingMode || '') !== String(pair.backingMode || '')) {
errors.push(`transportPairs[${pair.key}] backingMode must match gas family`);
}
if (!(gasFamily.originChains || []).map(Number).includes(destinationChainId)) {
errors.push(`transportPairs[${pair.key}] destinationChainId ${destinationChainId} not allowed by gas family`);
}
}
if (!pair.protocolExposureKey || !gasProtocolExposureByKey.has(String(pair.protocolExposureKey))) {
errors.push(`transportPairs[${pair.key}] is missing gas protocol exposure wiring`);
}
}
if ((pair.publicPoolKeys || []).length > 0) {
for (const publicPoolKey of pair.publicPoolKeys) {
if (!publicPoolsByKey.has(String(publicPoolKey))) {
@@ -286,14 +350,79 @@ for (const pool of active.publicPools || []) {
continue;
}
const deploymentChain = deployment.chains?.[String(pool.chainId)];
const deployedPools = Array.isArray(deploymentChain?.pmmPools) ? deploymentChain.pmmPools : [];
const deploymentMatch = deployedPools.some((entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress));
if (!deploymentMatch) {
const deployedStable = Array.isArray(deploymentChain?.pmmPools) ? deploymentChain.pmmPools : [];
const deployedVolatile = Array.isArray(deploymentChain?.pmmPoolsVolatile) ? deploymentChain.pmmPoolsVolatile : [];
const deployedGas = Array.isArray(deploymentChain?.gasPmmPools) ? deploymentChain.gasPmmPools : [];
const deploymentMatch = [...deployedStable, ...deployedVolatile].some(
(entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress)
);
const gasDeploymentMatch = deployedGas.some(
(entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress)
);
const stagedPlaceholder = String(pool.phase || '').toLowerCase().includes('staged');
if (!deploymentMatch && !gasDeploymentMatch && !stagedPlaceholder) {
errors.push(`publicPools[${pool.key}] is active but deployment-status.json does not contain its poolAddress`);
}
}
}
// Cross-check every enabled gas asset family in transport-active against the
// shared gasAssetRegistry declared in token-mapping-multichain.json.
for (const family of active.gasAssetFamilies || []) {
  const hasIdentity =
    Boolean(family.familyKey) &&
    Boolean(family.canonicalSymbol138) &&
    Boolean(family.mirroredSymbol);
  if (!hasIdentity) {
    errors.push(`gasAssetFamilies entry is missing familyKey/canonicalSymbol138/mirroredSymbol`);
    continue;
  }
  const originChainsDeclared = Array.isArray(family.originChains) && family.originChains.length > 0;
  if (!originChainsDeclared) {
    errors.push(`gasAssetFamilies[${family.familyKey}] must declare originChains`);
  }
  const registryFamilies = multichain.gasAssetRegistry?.families;
  if (!Array.isArray(registryFamilies)) {
    errors.push(`multichain gasAssetRegistry is missing while gas families are enabled`);
    continue;
  }
  const registryMatch = registryFamilies.find(
    (entry) => String(entry.familyKey) === String(family.familyKey)
  );
  if (!registryMatch) {
    errors.push(`gasAssetFamilies[${family.familyKey}] is missing from token-mapping-multichain.json gasAssetRegistry`);
    continue;
  }
  // Both sources must agree on the symbol pair for this family.
  if (String(registryMatch.canonicalSymbol138) !== String(family.canonicalSymbol138)) {
    errors.push(`gasAssetFamilies[${family.familyKey}] canonicalSymbol138 mismatch between transport-active and token-mapping-multichain`);
  }
  if (String(registryMatch.mirroredSymbol) !== String(family.mirroredSymbol)) {
    errors.push(`gasAssetFamilies[${family.familyKey}] mirroredSymbol mismatch between transport-active and token-mapping-multichain`);
  }
}
// Redeem groups using the family-fungible policy must cover exactly the
// family's origin-chain set (compared order-insensitively).
for (const group of active.gasRedeemGroups || []) {
  const family = gasFamiliesByKey.get(String(group.familyKey || ''));
  if (!family) {
    errors.push(`gasRedeemGroups[${group.key}] references missing family ${group.familyKey}`);
    continue;
  }
  if (String(group.redeemPolicy || '') !== 'family_fungible_inventory_gated') {
    continue;
  }
  const asSortedNumbers = (chains) => chains.map(Number).sort((a, b) => a - b);
  const allowed = asSortedNumbers(Array.isArray(group.allowedChains) ? group.allowedChains : []);
  const origins = asSortedNumbers(family.originChains || []);
  if (JSON.stringify(allowed) !== JSON.stringify(origins)) {
    errors.push(`gasRedeemGroups[${group.key}] must match the full origin chain set for ${family.familyKey}`);
  }
}
// 1inch exposure is gated: routing visibility (or live status) is only valid
// once both the Uniswap v3 leg is live and the DODO PMM leg is active.
for (const exposure of active.gasProtocolExposure || []) {
  const family = gasFamiliesByKey.get(String(exposure.familyKey || ''));
  if (!family) {
    errors.push(`gasProtocolExposure[${exposure.key}] references missing family ${exposure.familyKey}`);
    continue;
  }
  const oneInchCfg = exposure.oneInch || {};
  const uniswapCfg = exposure.uniswapV3 || {};
  const dodoCfg = exposure.dodoPmm || {};
  const oneInchEnabled = oneInchCfg.routingVisible === true || oneInchCfg.live === true;
  const prerequisitesLive = uniswapCfg.live === true && dodoCfg.active === true;
  if (oneInchEnabled && !prerequisitesLive) {
    errors.push(`gasProtocolExposure[${exposure.key}] cannot enable 1inch before DODO and Uniswap are live`);
  }
}
for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains || {})) {
const destinationChainId = Number(chainIdKey);
if (destinationChainId === canonicalChainId) continue;
@@ -303,7 +432,7 @@ for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains ||
if (!mappingPair) continue;
let compatible = true;
for (const token of enabledCanonicalTokens) {
for (const token of enabledCanonicalTokens.filter((entry) => entry.registryFamily !== 'gas_native')) {
const mappingKey = String(token.mappingKey || '');
const mirroredSymbol = String(token.mirroredSymbol || '');
const mappingToken = mappingKey ? (mappingPair.tokens || []).find((entry) => entry.key === mappingKey) : null;
@@ -331,7 +460,7 @@ for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains ||
continue;
}
for (const token of enabledCanonicalTokens) {
for (const token of enabledCanonicalTokens.filter((entry) => entry.registryFamily !== 'gas_native')) {
const expectedPairKey = `${canonicalChainId}-${destinationChainId}-${token.symbol}-${token.mirroredSymbol}`;
const expectedPoolKey = getExpectedPoolKey(destinationChainId, String(token.mirroredSymbol || ''));
const pair = transportPairsByKey.get(expectedPairKey);
@@ -419,6 +548,33 @@ NODE
ERRORS=$((ERRORS + 1))
fi
fi
# Shape-check the GRU V2 public JSON artifacts with jq. Each check is skipped
# when the file or jq is missing; each failure bumps the shared ERRORS counter.
check_gru_v2_json_shape() {
  local file="$1" jq_expr="$2" label="$3"
  [[ -f "$file" ]] || return 0
  command -v jq &>/dev/null || return 0
  if jq -e "$jq_expr" "$file" &>/dev/null; then
    log_ok "$label valid"
  else
    log_err "$label invalid"
    ERRORS=$((ERRORS + 1))
  fi
}
check_gru_v2_json_shape \
  "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_PUBLIC_DEPLOYMENT_STATUS.json" \
  '(.summary | type == "object") and (.protocols.publicCwMesh | type == "array") and (.transport.wave1 | type == "array")' \
  "GRU_V2_PUBLIC_DEPLOYMENT_STATUS"
check_gru_v2_json_shape \
  "$PROJECT_ROOT/config/gru-v2-public-protocol-rollout-plan.json" \
  '(.protocols | type == "array") and (.protocols | length > 0)' \
  "gru-v2-public-protocol-rollout-plan.json"
check_gru_v2_json_shape \
  "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_DEPLOYMENT_QUEUE.json" \
  '(.summary | type == "object") and (.assetQueue | type == "array") and (.chainQueue | type == "array") and (.protocolQueue | type == "array")' \
  "GRU_V2_DEPLOYMENT_QUEUE"
# Public-sector program manifest (served by phoenix-deploy-api GET /api/v1/public-sector/programs)
if [[ -f "$PROJECT_ROOT/config/public-sector-program-manifest.json" ]]; then
log_ok "Found: config/public-sector-program-manifest.json"
@@ -599,6 +755,42 @@ if [[ -d "$PROJECT_ROOT/config/xdc-zero" ]] && [[ -x "$SCRIPT_DIR/validate-xdc-z
fi
fi
if [[ -d "$PROJECT_ROOT/config/xdc-zero" ]] && [[ -x "$SCRIPT_DIR/validate-xdc-zero-relayer-env.sh" ]]; then
if bash "$SCRIPT_DIR/validate-xdc-zero-relayer-env.sh" &>/dev/null; then
log_ok "config/xdc-zero relayer env/default examples"
else
log_err "config/xdc-zero relayer env/default validation failed (validate-xdc-zero-relayer-env.sh)"
ERRORS=$((ERRORS + 1))
fi
fi
if [[ -x "$SCRIPT_DIR/validate-economics-strategy-json.sh" ]]; then
if bash "$SCRIPT_DIR/validate-economics-strategy-json.sh" &>/dev/null; then
log_ok "economics-toolkit strategy JSON (smoke + template; optional check-jsonschema)"
else
log_err "economics-toolkit strategy JSON validation failed (see scripts/validation/validate-economics-strategy-json.sh; run pnpm run economics:build)"
ERRORS=$((ERRORS + 1))
fi
fi
if command -v python3 &>/dev/null; then
for f in \
"$PROJECT_ROOT/scripts/it-ops/compute_ipam_drift.py" \
"$PROJECT_ROOT/scripts/it-ops/lib/collect_inventory_remote.py" \
"$PROJECT_ROOT/scripts/it-ops/persist-it-snapshot-sqlite.py" \
"$PROJECT_ROOT/services/sankofa-it-read-api/server.py"
do
if [[ -f "$f" ]]; then
if python3 -m py_compile "$f" &>/dev/null; then
log_ok "Python syntax: ${f#$PROJECT_ROOT/}"
else
log_err "Python syntax failed: $f"
ERRORS=$((ERRORS + 1))
fi
fi
done
fi
if [[ $ERRORS -gt 0 ]]; then
log_err "Validation failed with $ERRORS error(s). Set VALIDATE_REQUIRED_FILES='path1 path2' to require specific files."
exit 1

View File

@@ -47,6 +47,10 @@ if ! command -v check-jsonschema &>/dev/null; then
exit 1
fi
echo "skip: check-jsonschema not installed (pip install check-jsonschema); JSON parse still covered by validate-dbis-institutional-json.sh"
if command -v node &>/dev/null && [[ -f "$ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" ]]; then
node "$ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" || exit 1
echo "OK web3_eth_iban (XE) examples only (no schema tool)"
fi
exit 0
fi
@@ -61,4 +65,11 @@ validate_json_array "$EX_DIR/address-registry-entries-batch.example.json" "$SCHE
check-jsonschema --schemafile "$SCHEMA_DIR/trust.schema.json" "$EX_DIR/trust.json"
check-jsonschema --schemafile "$SCHEMA_DIR/governance.schema.json" "$EX_DIR/governance.json"
check-jsonschema --schemafile "$SCHEMA_DIR/policy-manifest.schema.json" "$EX_DIR/policy.json"
echo "OK dbis-institutional schema validation (settlement-event, settlement-event.chain138-primary, settlement-events-batch, address-registry-entry, address-registry-entries-batch, trust, governance, policy-manifest)"
if command -v node &>/dev/null && [[ -f "$ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" ]]; then
node "$ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" || exit 1
else
echo "skip: node or validate-address-registry-xe-aliases.mjs missing (XE alias check)"
fi
echo "OK dbis-institutional schema validation (settlement-event, settlement-event.chain138-primary, settlement-events-batch, address-registry-entry, address-registry-entries-batch, trust, governance, policy-manifest, web3_eth_iban examples)"

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Validate economics toolkit strategy JSON files (parse-only via CLI).
# Optional: check-jsonschema against packages/economics-toolkit/config/strategy.schema.json when installed.
# Usage: bash scripts/validation/validate-economics-strategy-json.sh
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"
CLI="$PROJECT_ROOT/packages/economics-toolkit/dist/cli.js"
SCHEMA="$PROJECT_ROOT/packages/economics-toolkit/config/strategy.schema.json"
# Build the toolkit on demand, then confirm the CLI actually materialized so a
# broken build fails here with a clear message instead of a confusing node
# module-resolution error further down.
if [[ ! -f "$CLI" ]]; then
  pnpm run economics:build
fi
if [[ ! -f "$CLI" ]]; then
  echo "[ERROR] economics toolkit CLI not found at $CLI after build (run: pnpm run economics:build)" >&2
  exit 1
fi
FILES=(
  "$PROJECT_ROOT/packages/economics-toolkit/config/strategy-smoke.json"
  "$PROJECT_ROOT/packages/economics-toolkit/config/strategy-template.json"
)
# Parse-check every strategy file via the CLI; --quiet keeps CI logs terse.
for f in "${FILES[@]}"; do
  if [[ ! -f "$f" ]]; then
    echo "[ERROR] Missing $f" >&2
    exit 1
  fi
  node "$CLI" strategy validate --file "$f" --quiet
done
# Optional deeper validation when check-jsonschema is on PATH.
if command -v check-jsonschema &>/dev/null && [[ -f "$SCHEMA" ]]; then
  for f in "${FILES[@]}"; do
    check-jsonschema --schemafile "$SCHEMA" "$f" >/dev/null
  done
fi
exit 0

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env python3
"""Validate global compliance matrix index and jurisdiction drill-down manifests."""
from __future__ import annotations
import json
import sys
from pathlib import Path
def fail(message: str) -> None:
    """Abort the run with a non-zero exit and an ``error:``-prefixed message."""
    sys.exit(f"error: {message}")
def load_json(path: Path) -> dict:
    """Read and parse *path* as JSON, aborting via fail() when the file is
    missing or does not parse."""
    try:
        raw = path.read_text(encoding="utf-8")
    except FileNotFoundError:
        fail(f"missing JSON file: {path}")
    try:
        return json.loads(raw)
    except json.JSONDecodeError as exc:
        fail(f"invalid JSON in {path}: {exc}")
def require_non_empty_str(obj: dict, key: str, label: str) -> str:
    """Return obj[key] stripped of whitespace, aborting via fail() unless it
    is a non-blank string."""
    value = obj.get(key)
    usable = isinstance(value, str) and bool(value.strip())
    if not usable:
        fail(f"{label} missing non-empty string {key}")
    return value.strip()
def check_repo_path(repo_root: Path, rel: str, label: str) -> None:
    """Abort via fail() unless *rel*, resolved against the repo root, names an
    existing regular file."""
    candidate = repo_root / rel
    if not candidate.is_file():
        fail(f"{label} path is not a file: {rel}")
def validate_package(repo_root: Path, pkg: dict, label: str) -> None:
    """Validate one compliance package entry: identity fields plus every
    referenced repo-relative artifact path."""
    if not isinstance(pkg, dict):
        fail(f"{label} must be an object")
    for field in ("package_id", "kind", "title"):
        require_non_empty_str(pkg, field, label)
    # Single-path fields are optional; an explicit null is treated as unset,
    # but anything else must be a non-blank string naming an existing file.
    for key in ("canonical_matrix_json", "spreadsheet_csv"):
        if key not in pkg or pkg[key] is None:
            continue
        rel = pkg[key]
        if not isinstance(rel, str) or not rel.strip():
            fail(f"{label}.{key} must be a non-empty string when set")
        check_repo_path(repo_root, rel.strip(), f"{label}.{key}")
    # List-of-path fields: when the key is present it must be a non-empty
    # array of non-blank strings, each naming an existing file (note: unlike
    # the single-path fields above, an explicit null is rejected here).
    for list_key in ("human_readable_md", "policy_md", "entry_point_md", "index_md"):
        if list_key not in pkg:
            continue
        items = pkg[list_key]
        if not isinstance(items, list) or not items:
            fail(f"{label}.{list_key} must be a non-empty array when present")
        for i, rel in enumerate(items):
            if not isinstance(rel, str) or not rel.strip():
                fail(f"{label}.{list_key}[{i}] must be a non-empty string")
            check_repo_path(repo_root, rel.strip(), f"{label}.{list_key}[{i}]")
def validate_jurisdiction_manifest(repo_root: Path, path: Path) -> None:
    """Validate one per-jurisdiction manifest file.

    Checks, in order: schema_version, identity fields, optional ISO-3166
    code format, audit engagements (including closure-matrix paths),
    regulators, and compliance packages; finally enforces that the file is
    named after its jurisdiction_id. Any violation aborts via fail().
    """
    data = load_json(path)
    if data.get("schema_version") != 1:
        fail(f"{path} schema_version must be 1")
    jid = require_non_empty_str(data, "jurisdiction_id", str(path))
    require_non_empty_str(data, "display_name", str(path))
    # iso_3166_alpha2 may be null or empty; when set it must be exactly two
    # alphabetic characters.
    iso = data.get("iso_3166_alpha2")
    if iso is not None and iso != "":
        if not isinstance(iso, str) or len(iso) != 2 or not iso.isalpha():
            fail(f"{path} iso_3166_alpha2 must be null or two letters")
    engagements = data.get("audit_engagements")
    if not isinstance(engagements, list):
        fail(f"{path} audit_engagements must be an array")
    for i, eng in enumerate(engagements):
        el = f"{path}:audit_engagements[{i}]"
        if not isinstance(eng, dict):
            fail(f"{el} must be an object")
        # closure_matrix is optional; falsy values (null / "") are skipped.
        if "closure_matrix" in eng and eng["closure_matrix"]:
            rel = eng["closure_matrix"]
            if not isinstance(rel, str):
                fail(f"{el}.closure_matrix must be a string")
            check_repo_path(repo_root, rel.strip(), f"{el}.closure_matrix")
    regulators = data.get("regulators")
    if not isinstance(regulators, list) or not regulators:
        fail(f"{path} regulators must be a non-empty array")
    for i, reg in enumerate(regulators):
        if not isinstance(reg, dict):
            fail(f"{path}:regulators[{i}] must be an object")
        require_non_empty_str(reg, "short", f"{path}:regulators[{i}]")
        require_non_empty_str(reg, "full_name", f"{path}:regulators[{i}]")
    packages = data.get("compliance_packages")
    if not isinstance(packages, list) or not packages:
        fail(f"{path} compliance_packages must be a non-empty array")
    for i, pkg in enumerate(packages):
        validate_package(repo_root, pkg, f"{path}:compliance_packages[{i}]")
    # The file on disk must be named <jurisdiction_id>.json so index lookups
    # and the manifest contents cannot drift apart.
    if path.stem != jid:
        fail(
            f"{path} file name must be {jid}.json (stem {path.stem!r} != jurisdiction_id {jid!r})"
        )
def main() -> int:
    """Validate the global compliance matrix index, then every referenced
    per-jurisdiction manifest, cross-checking id and ISO-code agreement.

    Returns 0 on success; any violation aborts via fail() (SystemExit).
    """
    repo_root = Path(__file__).resolve().parents[2]
    index_path = repo_root / "config/compliance-matrix/global-compliance-matrix-index.json"
    index = load_json(index_path)
    if index.get("schema_version") != 1:
        fail(f"{index_path} schema_version must be 1")
    require_non_empty_str(index, "title", str(index_path))
    require_non_empty_str(index, "matrix_version", str(index_path))
    rows = index.get("jurisdictions")
    if not isinstance(rows, list) or not rows:
        fail(f"{index_path} jurisdictions must be a non-empty array")
    seen_ids: set[str] = set()  # guards against duplicate jurisdiction rows
    for i, row in enumerate(rows):
        label = f"{index_path}:jurisdictions[{i}]"
        if not isinstance(row, dict):
            fail(f"{label} must be an object")
        jid = require_non_empty_str(row, "jurisdiction_id", label)
        if jid in seen_ids:
            fail(f"{label} duplicate jurisdiction_id {jid}")
        seen_ids.add(jid)
        require_non_empty_str(row, "display_name", label)
        require_non_empty_str(row, "status", label)
        manifest_rel = require_non_empty_str(row, "jurisdiction_manifest", label)
        manifest_path = repo_root / manifest_rel
        if not manifest_path.is_file():
            fail(f"{label} jurisdiction_manifest not found: {manifest_rel}")
        # Optional ISO code on the index row; same format rule as the manifest.
        iso = row.get("iso_3166_alpha2")
        if iso is not None and iso != "":
            if not isinstance(iso, str) or len(iso) != 2 or not iso.isalpha():
                fail(f"{label} iso_3166_alpha2 must be null or two letters")
        # Index row and manifest must agree exactly on id and ISO code.
        # NOTE(review): None in one file vs "" in the other counts as a
        # mismatch here — confirm that strictness is intended.
        inner = load_json(manifest_path)
        if inner.get("jurisdiction_id") != jid:
            fail(f"{manifest_path} jurisdiction_id {inner.get('jurisdiction_id')!r} != index {jid!r}")
        inner_iso = inner.get("iso_3166_alpha2")
        if inner_iso != iso:
            fail(
                f"{manifest_path} iso_3166_alpha2 {inner_iso!r} must match index row {iso!r} "
                f"for jurisdiction_id {jid}"
            )
        validate_jurisdiction_manifest(repo_root, manifest_path)
    print(f"OK global compliance matrix index ({len(rows)} jurisdictions, manifests validated)")
    return 0
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -44,6 +44,10 @@ fi
python3 "$ROOT/scripts/validation/validate-jvmtm-transaction-compliance-pack.py"
if [[ -f "$ROOT/config/compliance-matrix/global-compliance-matrix-index.json" ]]; then
python3 "$ROOT/scripts/validation/validate-global-compliance-matrix-index.py"
fi
if [[ "$HAVE_CHECK_JSONSCHEMA" == "1" ]]; then
echo "OK jvmtm-regulatory-closure schema validation (11 example/schema pairs + transaction-grade pack checks)"
else

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
# Validate naming-conventions registry example JSON against token-registry-entry.schema.json.
# Uses check-jsonschema when available (pip install check-jsonschema).
#
# Env:
# SCHEMA_STRICT=1 exit 1 if check-jsonschema is missing (default: skip with 0)
#
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SCHEMA="$ROOT/docs/04-configuration/naming-conventions/schemas/token-registry-entry.schema.json"
EX_DIR="$ROOT/docs/04-configuration/naming-conventions/examples"
if ! command -v check-jsonschema &>/dev/null; then
  if [[ "${SCHEMA_STRICT:-0}" == "1" ]]; then
    echo "error: check-jsonschema not found; pip install check-jsonschema" >&2
    exit 1
  fi
  echo "skip: check-jsonschema not installed (pip install check-jsonschema)"
  exit 0
fi
if [[ ! -f "$SCHEMA" ]]; then
  echo "error: missing schema $SCHEMA" >&2
  exit 1
fi
# Every registry example is validated against the one shared schema.
EXAMPLES=(
  gru-cusdc-chain138.example.json
  gru-cusdc-v2-chain138.example.json
  gru-cusdt-chain138.example.json
  gru-cusdt-v2-chain138.example.json
  utrnf-lending-triad.example.json
  utrnf-collateral-placeholder.example.json
)
for example in "${EXAMPLES[@]}"; do
  check-jsonschema --schemafile "$SCHEMA" "$EX_DIR/$example"
done
echo "OK naming-conventions registry examples (token-registry-entry schema)"

View File

@@ -0,0 +1,34 @@
#!/usr/bin/env bash
# Parse-check JSON under config/xdc-zero/ (templates + merge fragments).
# Usage: bash scripts/validation/validate-xdc-zero-config.sh [--dry-run]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
DIR="$PROJECT_ROOT/config/xdc-zero"
# --dry-run reports what would be checked and exits successfully.
DRY_RUN=false
for arg in "$@"; do
  if [[ "$arg" == "--dry-run" ]]; then
    DRY_RUN=true
    break
  fi
done
if $DRY_RUN; then
  echo "Would jq-empty-parse: $DIR/*.json"
  exit 0
fi
if ! command -v jq >/dev/null 2>&1; then
  echo "ERROR: jq required" >&2
  exit 1
fi
# `jq empty` parses each file without producing output; a parse failure flips
# the final exit status but every remaining file is still reported.
ERR=0
for f in "$DIR"/*.json; do
  [[ -f "$f" ]] || continue
  if jq empty "$f" 2>/dev/null; then
    echo "[OK] $f"
  else
    echo "[ERROR] invalid JSON: $f" >&2
    ERR=1
  fi
done
exit "$ERR"