#!/usr/bin/env bash
# Deploy token-aggregation service for publication (token lists, CoinGecko/CMC reports, bridge/routes).
# Run on explorer VM (VMID 5000) or host that serves explorer.d-bis.org.
#
# Prerequisites: Node 20+, PostgreSQL (for full indexing; API responds with defaults if DB empty)
#
# Usage: ./scripts/deploy-token-aggregation-for-publication.sh [INSTALL_DIR]
#
# After deploy: nginx must proxy /api/v1/ to this service BEFORE Blockscout (see TOKEN_AGGREGATION_REPORT_API_RUNBOOK).
# Explorer layouts vary: port 3000 or 3001 — match TOKEN_AGG_PORT in apply-nginx scripts.
set -euo pipefail

# Resolve repo root relative to this script (lives in scripts/).
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
INSTALL_DIR="${1:-$REPO_ROOT/token-aggregation-build}"

# Bundle layout mirrors the repo so relative config paths keep working on the VM.
BUNDLE_ROOT="$INSTALL_DIR"
SERVICE_INSTALL_DIR="$BUNDLE_ROOT/smom-dbis-138/services/token-aggregation"

SVC_DIR="$REPO_ROOT/smom-dbis-138/services/token-aggregation"

# Best-effort: pull repo-level env defaults if the helper exists.
# shellcheck source=/dev/null
source "$REPO_ROOT/scripts/lib/load-project-env.sh" >/dev/null 2>&1 || true
#######################################
# Set KEY=VALUE in an env file: replace an existing `KEY=` line in place,
# otherwise append. No-op when VALUE is empty (keeps whatever is there).
# Arguments: $1 env file path, $2 key, $3 value (optional)
#######################################
upsert_env_var() {
  local env_file="$1"
  local key="$2"
  local value="${3:-}"
  [[ -n "$value" ]] || return 0
  if grep -q "^${key}=" "$env_file"; then
    # '|' as sed delimiter: values are often URLs/paths containing '/'.
    sed -i "s|^${key}=.*|${key}=${value}|" "$env_file"
  else
    printf '%s=%s\n' "$key" "$value" >> "$env_file"
  fi
}
#######################################
# Guarantee `KEY=` exists in an env file, appending an empty assignment only
# when the key is absent (never clobbers an existing value).
# Arguments: $1 env file path, $2 key
#######################################
ensure_env_key() {
  local env_file="$1"
  local key="$2"
  grep -q "^${key}=" "$env_file" && return 0
  printf '%s=\n' "$key" >> "$env_file"
}
#######################################
# Print the CWReserveVerifier address for chain 138.
# Order: explicit env override, then the CREATE transaction's contractAddress
# in the latest foundry broadcast artifact. Prints nothing when neither exists.
# Globals: CW_RESERVE_VERIFIER_CHAIN138 (read), REPO_ROOT (read)
#######################################
derive_cw_reserve_verifier() {
  if [[ -n "${CW_RESERVE_VERIFIER_CHAIN138:-}" ]]; then
    printf '%s' "$CW_RESERVE_VERIFIER_CHAIN138"
    return 0
  fi
  local broadcast_json="$REPO_ROOT/smom-dbis-138/broadcast/DeployCWReserveVerifier.s.sol/138/run-latest.json"
  [[ -f "$broadcast_json" ]] || return 0
  # Node does the JSON parsing; any parse failure is silent (best-effort derive).
  node -e '
const fs = require("fs");
const file = process.argv[1];
try {
  const data = JSON.parse(fs.readFileSync(file, "utf8"));
  const tx = (data.transactions || []).find(
    (entry) => entry.contractName === "CWReserveVerifier" &&
      entry.transactionType === "CREATE" &&
      entry.contractAddress
  );
  if (tx?.contractAddress) process.stdout.write(tx.contractAddress);
} catch (_) {
  process.exit(0);
}
' "$broadcast_json"
}
#######################################
# Print the deployed CWAssetReserveVerifier address for chain 138.
# Order: explicit env override, then config/smart-contracts-master.json.
# Only emits the config value when it looks like a 20-byte hex address.
# Globals: CW_ASSET_RESERVE_VERIFIER_DEPLOYED_CHAIN138 (read), REPO_ROOT (read)
#######################################
derive_cw_asset_reserve_verifier() {
  if [[ -n "${CW_ASSET_RESERVE_VERIFIER_DEPLOYED_CHAIN138:-}" ]]; then
    printf '%s' "$CW_ASSET_RESERVE_VERIFIER_DEPLOYED_CHAIN138"
    return 0
  fi
  node - <<'NODE' "$REPO_ROOT/config/smart-contracts-master.json"
const fs = require('fs');
const file = process.argv[2];
try {
  const data = JSON.parse(fs.readFileSync(file, 'utf8'));
  const address = data?.chains?.['138']?.contracts?.CWAssetReserveVerifier;
  if (typeof address === 'string' && /^0x[a-fA-F0-9]{40}$/.test(address)) {
    process.stdout.write(address);
  }
} catch (_) {
  process.exit(0);
}
NODE
}
#######################################
# Print the per-lane cap for the transport pair whose maxOutstanding.env
# matches the given env key, from config/gru-transport-active.json.
# Inactive pairs are skipped; missing data prints nothing (best-effort).
# Arguments: $1 env key (e.g. CW_MAX_OUTSTANDING_*)
# Globals: REPO_ROOT (read)
#######################################
derive_gru_transport_policy_amount() {
  local env_key="$1"
  node - <<'NODE' "$REPO_ROOT/config/gru-transport-active.json" "$env_key"
const fs = require('fs');
const file = process.argv[2];
const envKey = process.argv[3];
try {
  const data = JSON.parse(fs.readFileSync(file, 'utf8'));
  const familiesByKey = new Map();
  for (const family of data.gasAssetFamilies || []) {
    if (family && family.familyKey) familiesByKey.set(String(family.familyKey), family);
  }
  for (const pair of data.transportPairs || []) {
    if (!pair || pair.active === false) continue;
    if (pair?.maxOutstanding?.env === envKey) {
      const family = familiesByKey.get(String(pair.familyKey || ''));
      const cap = family?.perLaneCaps?.[String(pair.destinationChainId)];
      if (typeof cap === 'string' && cap.trim()) {
        process.stdout.write(cap.trim());
      }
      process.exit(0);
    }
  }
} catch (_) {
  process.exit(0);
}
NODE
}
#######################################
# Sync every env var referenced by config/gru-transport-active.json into the
# service env file. Known keys get derived values (environment, broadcast
# artifacts, policy caps); unknown keys are still written as empty `KEY=` so
# the service sees a complete set.
# Arguments: $1 env file path
# Globals: CHAIN138_L1_BRIDGE / CW_L1_BRIDGE_CHAIN138 and the CW_* families
#          (read via indirect expansion), REPO_ROOT (read)
#######################################
sync_gru_transport_env() {
  local env_file="$1"
  local chain138_l1_bridge="${CHAIN138_L1_BRIDGE:-${CW_L1_BRIDGE_CHAIN138:-}}"
  local reserve_verifier=""
  local asset_reserve_verifier=""
  local key=""
  local value=""
  local derived_value=""
  local refs=()
  reserve_verifier="$(derive_cw_reserve_verifier || true)"
  asset_reserve_verifier="$(derive_cw_asset_reserve_verifier || true)"
  upsert_env_var "$env_file" "CHAIN138_L1_BRIDGE" "$chain138_l1_bridge"
  upsert_env_var "$env_file" "CW_RESERVE_VERIFIER_CHAIN138" "$reserve_verifier"
  upsert_env_var "$env_file" "CW_ASSET_RESERVE_VERIFIER_DEPLOYED_CHAIN138" "$asset_reserve_verifier"
  refs=()
  # Collect the sorted, de-duplicated set of `env` references anywhere in the config.
  while IFS= read -r key; do
    [[ -n "$key" ]] && refs+=("$key")
  done < <(
    node - <<'NODE' "$REPO_ROOT/config/gru-transport-active.json"
const fs = require('fs');
const file = process.argv[2];
const data = JSON.parse(fs.readFileSync(file, 'utf8'));
const refs = new Set();
function walk(value) {
  if (Array.isArray(value)) {
    value.forEach(walk);
    return;
  }
  if (!value || typeof value !== 'object') return;
  if (typeof value.env === 'string' && value.env.trim()) refs.add(value.env.trim());
  Object.values(value).forEach(walk);
}
walk(data);
for (const key of [...refs].sort()) console.log(key);
NODE
  )
  for key in "${refs[@]}"; do
    case "$key" in
      CHAIN138_L1_BRIDGE)
        value="$chain138_l1_bridge"
        ;;
      CW_RESERVE_VERIFIER_CHAIN138)
        value="${CW_RESERVE_VERIFIER_CHAIN138:-$reserve_verifier}"
        ;;
      CW_MAX_OUTSTANDING_*)
        # Environment wins; otherwise fall back to the policy's per-lane cap.
        derived_value="$(derive_gru_transport_policy_amount "$key" || true)"
        value="${!key:-$derived_value}"
        ;;
      CW_GAS_OUTSTANDING_*|CW_GAS_ESCROWED_*|CW_GAS_TREASURY_BACKED_*|CW_GAS_TREASURY_CAP_*)
        # Counters/caps default to zero so the service starts from a sane state.
        value="${!key:-0}"
        ;;
      *)
        value="${!key:-}"
        ;;
    esac
    if [[ -n "$value" ]]; then
      upsert_env_var "$env_file" "$key" "$value"
    else
      ensure_env_key "$env_file" "$key"
    fi
  done
}
#######################################
# Point explorer-side read services at the public Chain 138 RPC node — never
# the operator/deploy core RPC — under every alias the service reads.
# Arguments: $1 env file path
# Globals: TOKEN_AGG_CHAIN138_RPC_URL, TOKEN_AGGREGATION_PMM_RPC_URL,
#          TOKEN_AGGREGATION_PMM_QUERY_TRADER (all read, optional)
#######################################
sync_chain138_public_rpc_env() {
  local env_file="$1"
  local public_chain138_rpc="${TOKEN_AGG_CHAIN138_RPC_URL:-http://192.168.11.221:8545}"
  upsert_env_var "$env_file" "CHAIN_138_RPC_URL" "$public_chain138_rpc"
  upsert_env_var "$env_file" "RPC_URL_138" "$public_chain138_rpc"
  # Explicit alias for GET /api/v1/quote PMM on-chain path (see pmm-onchain-quote.ts).
  upsert_env_var "$env_file" "TOKEN_AGGREGATION_CHAIN138_RPC_URL" "$public_chain138_rpc"
  # Optional operator override: set in repo .env before deploy to use core RPC for
  # PMM eth_calls only while keeping publication indexing on the public node above.
  if [[ -n "${TOKEN_AGGREGATION_PMM_RPC_URL:-}" ]]; then
    upsert_env_var "$env_file" "TOKEN_AGGREGATION_PMM_RPC_URL" "$TOKEN_AGGREGATION_PMM_RPC_URL"
  fi
  if [[ -n "${TOKEN_AGGREGATION_PMM_QUERY_TRADER:-}" ]]; then
    upsert_env_var "$env_file" "TOKEN_AGGREGATION_PMM_QUERY_TRADER" "$TOKEN_AGGREGATION_PMM_QUERY_TRADER"
  fi
}
if [ ! -d "$SVC_DIR" ]; then
  echo "Token-aggregation not found at $SVC_DIR" >&2
  exit 1
fi

echo "Deploying token-aggregation bundle to $BUNDLE_ROOT"
mkdir -p "$BUNDLE_ROOT/config"
mkdir -p "$BUNDLE_ROOT/cross-chain-pmm-lps"
# Fresh copy: stale node_modules types (file vs dir) break cp -a on re-runs.
rm -rf "$SERVICE_INSTALL_DIR"
mkdir -p "$SERVICE_INSTALL_DIR"
cp -a "$SVC_DIR"/. "$SERVICE_INSTALL_DIR/"
cp -a "$REPO_ROOT/config"/. "$BUNDLE_ROOT/config/"
cp -a "$REPO_ROOT/cross-chain-pmm-lps/config" "$BUNDLE_ROOT/cross-chain-pmm-lps/"
cd "$SERVICE_INSTALL_DIR"
# Bootstrap .env on first deploy; never overwrite an operator-managed one.
if [ ! -f .env ]; then
  if [ -f .env.example ]; then
    cp .env.example .env
    echo "Created .env from .env.example — set DATABASE_URL for persistent index; CUSDT/CUSDC already defaulted."
  else
    echo "Create .env with at least DATABASE_URL (and optional CHAIN_138_RPC_URL)." >&2
  fi
fi
sync_gru_transport_env .env
sync_chain138_public_rpc_env .env

# Prefer the pnpm workspace install when available (shared lockfile); the plain
# npm install below still runs as the fallback / completion step.
if command -v pnpm >/dev/null 2>&1 && [ -f "$REPO_ROOT/pnpm-lock.yaml" ]; then
  (cd "$REPO_ROOT" && pnpm install --filter token-aggregation-service --no-frozen-lockfile 2>/dev/null) || true
fi

npm install
npm run build
# Trim dev dependencies from the shipped bundle; best-effort.
npm prune --omit=dev >/dev/null 2>&1 || true
# Print follow-up instructions for the operator (start, schema, nginx, verify, push).
echo ""
echo "Token-aggregation built. Start with:"
echo "  cd $SERVICE_INSTALL_DIR && node dist/index.js"
echo "Or add systemd unit. Default port from code: 3000 (match nginx TOKEN_AGG_PORT / fix-explorer-http-api-v1-proxy.sh uses 3001)."
echo ""
echo "If this is a standalone token_aggregation database (no explorer-monorepo schema), bootstrap the lightweight schema first:"
echo "  cd $SERVICE_INSTALL_DIR && bash scripts/apply-lightweight-schema.sh"
echo ""
echo "Then apply nginx proxy (on same host), e.g.:"
echo "  TOKEN_AGG_PORT=3001 CONFIG_FILE=/etc/nginx/sites-available/blockscout \\"
echo "    bash $REPO_ROOT/scripts/fix-explorer-http-api-v1-proxy.sh"
echo "  # or: explorer-monorepo/scripts/apply-nginx-token-aggregation-proxy.sh"
echo ""
echo "Verify:"
echo "  pnpm run verify:token-aggregation-api"
echo "Push to explorer VM (LAN):"
echo "  EXPLORER_SSH=root@192.168.11.140 bash scripts/deployment/push-token-aggregation-bundle-to-explorer.sh $BUNDLE_ROOT"
echo "  SKIP_BRIDGE_ROUTES=0 bash scripts/verify/check-public-report-api.sh https://explorer.d-bis.org"
echo "  ALLOW_BLOCKED=1 bash scripts/verify/check-gru-transport-preflight.sh https://explorer.d-bis.org"
echo "  bash scripts/verify/check-gas-public-pool-status.sh"
echo ""
echo "Notes:"
echo "  CW_ASSET_RESERVE_VERIFIER_DEPLOYED_CHAIN138 is synced as a neutral reference only."
echo "  Leave CW_GAS_STRICT_ESCROW_VERIFIER_CHAIN138 and CW_GAS_HYBRID_CAP_VERIFIER_CHAIN138 unset until the live L1 bridge is explicitly wired to the generic gas verifier."
echo "  GET /api/v1/quote uses on-chain PMM when RPC is set (RPC_URL_138 or TOKEN_AGGREGATION_*); optional TOKEN_AGGREGATION_PMM_RPC_URL in operator .env overrides PMM calls only."