Files
proxmox/scripts/verify/build-liquidity-pools-master-map.py
defiQUG b8613905bd
Some checks failed
Deploy to Phoenix / validate (push) Failing after 15s
Deploy to Phoenix / deploy (push) Has been skipped
chore: sync workspace — configs, docs, scripts, CI, pnpm, submodules
- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138
- Atomic swap + cross-chain-pmm-lps-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices
- Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts

Made-with: Cursor
2026-04-21 22:01:33 -07:00

595 lines
25 KiB
Python

#!/usr/bin/env python3
"""Build a live liquidity-pool inventory from deployment-status + on-chain reserves.
Outputs:
- reports/status/liquidity-pools-master-map-latest.json
- docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md
"""
from __future__ import annotations
import json
import os
import subprocess
import sys
import time
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
# Repository root: three path components above this file — presumably the repo
# containing scripts/verify/. TODO confirm against the actual checkout layout.
ROOT = Path(__file__).resolve().parents[2]
# Canonical machine-readable deployment graph (required input).
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps/config/deployment-status.json"
# Chain-138-specific pool/token configuration (required inputs).
CHAIN138_EXECUTION = ROOT / "smom-dbis-138/config/chain138-eth-pmm-pools-execution.json"
CHAIN138_PMM = ROOT / "smom-dbis-138/config/chain138-pmm-pools.json"
# Output artifacts written by main().
OUT_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
OUT_MD = ROOT / "docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md"
# Optional enrichment reports; each is consumed only when the file exists.
UNIV2_DISCOVERY = ROOT / "reports/extraction/promod-uniswap-v2-live-pair-discovery-latest.json"
GRU_GAP_REPORT = ROOT / "reports/extraction/promod-gru-v2-full-mesh-gap-report-latest.json"
# Pool-inventory list keys read from each chain entry of the deployment graph.
POOL_LIST_KEYS = ("pmmPools", "pmmPoolsVolatile", "gasPmmPools", "uniswapV2Pools")
# Fixed rendering order of chain ids (string keys into the deployment graph).
CHAIN_ORDER = ["138", "651940", "1", "10", "25", "56", "100", "137", "8453", "42161", "42220", "43114", "1111"]
# Per-chain JSON-RPC endpoints: the first set environment variable wins,
# otherwise the hard-coded public endpoint is used.
RPC_DEFAULTS = {
    "138": os.environ.get("CHAIN138_RPC_URL")
    or os.environ.get("RPC_URL_138")
    or os.environ.get("RPC_URL")
    or "https://rpc-core.d-bis.org",
    "651940": os.environ.get("CHAIN_651940_RPC")
    or os.environ.get("CHAIN_651940_RPC_URL")
    or os.environ.get("ALLTRA_MAINNET_RPC")
    or "https://mainnet-rpc.alltra.global",
    "1": os.environ.get("ETHEREUM_MAINNET_RPC") or "https://eth.llamarpc.com",
    "10": os.environ.get("OPTIMISM_MAINNET_RPC") or os.environ.get("OPTIMISM_RPC_URL") or "https://mainnet.optimism.io",
    "25": os.environ.get("CRONOS_RPC") or os.environ.get("CRONOS_MAINNET_RPC") or "https://evm.cronos.org",
    "56": os.environ.get("BSC_RPC_URL") or os.environ.get("BSC_MAINNET_RPC") or "https://bsc-dataseed.binance.org",
    "100": os.environ.get("GNOSIS_RPC") or os.environ.get("GNOSIS_MAINNET_RPC") or "https://rpc.gnosischain.com",
    "137": os.environ.get("POLYGON_MAINNET_RPC") or os.environ.get("POLYGON_RPC_URL") or "https://polygon-bor-rpc.publicnode.com",
    "8453": os.environ.get("BASE_MAINNET_RPC") or os.environ.get("BASE_RPC_URL") or "https://mainnet.base.org",
    "42161": os.environ.get("ARBITRUM_MAINNET_RPC") or os.environ.get("ARBITRUM_RPC_URL") or "https://arb1.arbitrum.io/rpc",
    "42220": os.environ.get("CELO_RPC") or os.environ.get("CELO_MAINNET_RPC") or "https://forno.celo.org",
    "43114": os.environ.get("AVALANCHE_RPC_URL") or os.environ.get("AVALANCHE_MAINNET_RPC") or "https://api.avax.network/ext/bc/C/rpc",
    "1111": os.environ.get("WEMIX_RPC") or os.environ.get("WEMIX_MAINNET_RPC") or "https://api.wemix.com",
}
# Note markers that flag a configured pool/venue as a non-live scaffold.
PLACEHOLDER_NOTES = {"placeholder_scaffold_not_live"}
# The EVM zero address.
ZERO_ADDR = "0x0000000000000000000000000000000000000000"
def load_json(path: Path) -> Any:
    """Parse and return the JSON document stored at *path*."""
    with path.open() as handle:
        return json.load(handle)
def cast_call(rpc: str, to: str, sig: str, *args: str) -> list[str]:
    """Run `cast call <to> <sig> [args...] --rpc-url <rpc>` with retries.

    Makes up to 3 attempts with linear backoff (0.25s, then 0.5s) and returns
    the non-empty output lines, stripped.

    Fix: the original retried on *any* Exception, so a missing `cast` binary
    (FileNotFoundError) burned all three attempts plus the backoff sleeps.
    Only transient failures — a non-zero exit or a timeout — are retried now;
    environment errors propagate immediately.

    Raises:
        subprocess.CalledProcessError / subprocess.TimeoutExpired: last
            transient failure after all attempts are exhausted.
        OSError: immediately, e.g. when `cast` is not installed.
    """
    cmd = ["cast", "call", to, sig, *args, "--rpc-url", rpc]
    last_exc: Exception | None = None
    for attempt in range(3):
        try:
            out = subprocess.check_output(cmd, text=True, stderr=subprocess.DEVNULL, timeout=5).strip()
            return [line.strip() for line in out.splitlines() if line.strip()]
        except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as exc:
            last_exc = exc
            if attempt < 2:
                time.sleep(0.25 * (attempt + 1))
    # last_exc is always set when the loop falls through; the `or` guard is
    # kept purely to satisfy static analysis.
    raise last_exc or RuntimeError(f"cast call failed for {to} {sig}")
def cast_code(rpc: str, address: str) -> str:
    """Return the deployed bytecode at *address* via `cast code`.

    Makes up to 3 attempts with linear backoff (0.25s, then 0.5s); re-raises
    the final failure when every attempt errors out.
    """
    command = ["cast", "code", address, "--rpc-url", rpc]
    failure: Exception | None = None
    for attempt_no in (1, 2, 3):
        try:
            return subprocess.check_output(command, text=True, stderr=subprocess.DEVNULL, timeout=5).strip()
        except Exception as exc:
            failure = exc
            if attempt_no < 3:
                time.sleep(0.25 * attempt_no)
    raise failure or RuntimeError(f"cast code failed for {address}")
def clean_int(value: str) -> int:
    """Parse the leading whitespace-separated token of *value* as an int.

    `cast` output may carry a decoded suffix (e.g. "0x0f [15]"); only the
    first token is parsed — as hex when 0x-prefixed, decimal otherwise.
    """
    token = value.split(None, 1)[0]
    base = 16 if token.startswith("0x") else 10
    return int(token, base)
def is_placeholder_address(addr: str | None) -> bool:
    """Heuristically detect scaffold/placeholder addresses.

    True when *addr* is missing, not a string, lacks a 0x prefix, or its hex
    body contains a run of 20+ zeros (which also covers the zero address).
    """
    if not isinstance(addr, str) or not addr or not addr.startswith("0x"):
        return True
    hex_body = addr[2:].lower()
    # The all-zero address is a special case of the 20-zero-run check.
    return ("0" * 20) in hex_body
def human_amount(raw: int | None, decimals: int | None) -> str | None:
    """Format a raw integer token amount as a human decimal string.

    Trailing zeros in the fractional part are trimmed, and the decimal point
    is dropped entirely when the fraction is zero. Returns None when either
    input is None.
    """
    if raw is None or decimals is None:
        return None
    sign = "-" if raw < 0 else ""
    whole, frac = divmod(abs(raw), 10 ** decimals)
    fraction = f"{frac:0{decimals}d}".rstrip("0")
    if fraction:
        return f"{sign}{whole}.{fraction}"
    return f"{sign}{whole}"
def markdown_table(headers: list[str], rows: list[list[str]]) -> str:
    """Render a GitHub-flavoured markdown table from header and body cells."""
    def render_row(cells: list[str]) -> str:
        return "| " + " | ".join(cells) + " |"

    separator = ["---"] * len(headers)
    body = [render_row(cells) for cells in rows]
    return "\n".join([render_row(headers), render_row(separator), *body])
@dataclass
class TokenInfo:
    """Resolved ERC-20 token metadata.

    NOTE(review): not referenced anywhere else in this file — possibly
    consumed by an importer, or vestigial; confirm before removing.
    """
    # Token symbol as configured in the deployment graph.
    symbol: str
    # Hex token address; None when resolution failed.
    address: str | None
    # ERC-20 decimals; None when not readable on-chain.
    decimals: int | None
class PoolBuilder:
    """Builds the liquidity-pool master-map report.

    Loads the canonical deployment graph plus chain-138 config files and the
    optional discovery/gap reports, then walks every configured pool per
    chain, attaching live on-chain reserve reads (via `cast` over JSON-RPC)
    where an endpoint exists and the pool has deployed code.
    """

    def __init__(self) -> None:
        # Required inputs — a missing file raises immediately.
        self.deployment_status = load_json(DEPLOYMENT_STATUS)
        self.chain138_execution = load_json(CHAIN138_EXECUTION)
        self.chain138_pmm = load_json(CHAIN138_PMM)
        # Optional enrichment reports; absence simply disables those paths.
        self.univ2_discovery = load_json(UNIV2_DISCOVERY) if UNIV2_DISCOVERY.exists() else None
        self.gru_gap_report = load_json(GRU_GAP_REPORT) if GRU_GAP_REPORT.exists() else None
        # (chain_id, lowercased token address) -> decimals or None on failure.
        self.decimals_cache: dict[tuple[str, str], int | None] = {}
        self.univ2_health_index = self.build_univ2_health_index()
        self.documented_live_dodo_pairs = self.build_documented_live_dodo_pairs()
        # Fallbacks used when on-chain decimals() fails or reports 0 for
        # these wrapper tokens (all assumed 18-decimal).
        self.fallback_symbol_decimals = {
            "WETH": 18,
            "WETH9": 18,
            "WETH10": 18,
            "WETHL2": 18,
            "WWEMIX": 18,
            "WCRO": 18,
            "WALL": 18,
            "cWETH": 18,
            "cWETHL2": 18,
        }

    def build_univ2_health_index(self) -> dict[tuple[int, str], dict[str, Any]]:
        """Index the Uniswap-V2 discovery report by (chain id, pair address).

        Zero-address entries are skipped; returns an empty index when the
        discovery report is absent.
        """
        index: dict[tuple[int, str], dict[str, Any]] = {}
        if not self.univ2_discovery:
            return index
        for entry in self.univ2_discovery.get("entries", []):
            chain_id = int(entry["chain_id"])
            for pair in entry.get("pairsChecked", []):
                addr = pair.get("poolAddress")
                if addr and addr != ZERO_ADDR:
                    index[(chain_id, addr.lower())] = pair
        return index

    def build_documented_live_dodo_pairs(self) -> dict[int, set[str]]:
        """Map chain id -> set of "BASE/QUOTE" DODO pairs documented as live.

        Sourced from the GRU gap report; empty when that report is absent.
        """
        out: dict[int, set[str]] = {}
        if not self.gru_gap_report:
            return out
        for chain in self.gru_gap_report.get("chains", []):
            dodo = ((chain.get("venue_status") or {}).get("dodo_pmm") or {})
            out[int(chain["chain_id"])] = set(dodo.get("live_pairs", []))
        return out

    def resolve_token_address(self, chain_id: str, chain_data: dict[str, Any], symbol: str) -> str | None:
        """Resolve a token symbol to an address on *chain_id*, or None.

        Chain 138 consults its dedicated config files first; every chain then
        falls back to the address maps embedded in the deployment graph, in
        cwTokens -> anchorAddresses -> gasMirrors -> gasQuoteAddresses order.
        """
        if chain_id == "138":
            if symbol in self.chain138_execution.get("tokens", {}):
                return self.chain138_execution["tokens"][symbol]
            if symbol in chain_data.get("anchorAddresses", {}):
                return chain_data["anchorAddresses"][symbol]
            if symbol in self.chain138_pmm.get("tokens", {}):
                return self.chain138_pmm["tokens"][symbol]
        for key in ("cwTokens", "anchorAddresses", "gasMirrors", "gasQuoteAddresses"):
            mapping = chain_data.get(key, {})
            if symbol in mapping:
                return mapping[symbol]
        return None

    def token_decimals(self, rpc: str, address: str | None, chain_id: str, symbol: str | None = None) -> int | None:
        """Read ERC-20 decimals() over RPC, with caching and symbol fallback.

        Failures are cached too, so a dead token is queried at most once per
        run. Returns None when the call fails and *symbol* has no fallback.
        """
        if not address:
            return None
        cache_key = (chain_id, address.lower())
        if cache_key in self.decimals_cache:
            return self.decimals_cache[cache_key]
        try:
            lines = cast_call(rpc, address, "decimals()(uint8)")
            dec = clean_int(lines[0])
            # Some wrappers report 0 decimals on-chain; trust the fallback map.
            if dec == 0 and (symbol or "") in self.fallback_symbol_decimals:
                dec = self.fallback_symbol_decimals[symbol or ""]
        except Exception:
            dec = self.fallback_symbol_decimals.get(symbol or "")
        self.decimals_cache[cache_key] = dec
        return dec

    def build_pool_rows(self) -> dict[str, Any]:
        """Assemble the full report: one entry per chain in CHAIN_ORDER.

        Chains present in CHAIN_ORDER but missing from the deployment graph
        are skipped silently.
        """
        chains = self.deployment_status["chains"]
        generated_at = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
        report_chains: list[dict[str, Any]] = []
        for chain_id in CHAIN_ORDER:
            chain_data = chains.get(chain_id)
            if not chain_data:
                continue
            rpc = RPC_DEFAULTS.get(chain_id)
            chain_rows: list[dict[str, Any]] = []
            for list_key in POOL_LIST_KEYS:
                for pool in chain_data.get(list_key, []):
                    row = self.build_pool_row(chain_id, chain_data, list_key, pool, rpc)
                    chain_rows.append(row)
            reference_venues = [self.build_reference_venue_row(chain_id, chain_data, venue) for venue in chain_data.get("gasReferenceVenues", [])]
            report_chains.append(
                {
                    "chainId": int(chain_id),
                    "network": chain_data["name"],
                    "rpc": rpc,
                    "poolCount": len(chain_rows),
                    "pools": chain_rows,
                    "referenceVenueCount": len(reference_venues),
                    "referenceVenues": reference_venues,
                }
            )
        return {
            "generatedAt": generated_at,
            "sourceFiles": [
                str(DEPLOYMENT_STATUS.relative_to(ROOT)),
                str(CHAIN138_EXECUTION.relative_to(ROOT)),
                str(CHAIN138_PMM.relative_to(ROOT)),
            ],
            "chains": report_chains,
        }

    def build_pool_row(
        self,
        chain_id: str,
        chain_data: dict[str, Any],
        list_key: str,
        pool: dict[str, Any],
        rpc: str | None,
    ) -> dict[str, Any]:
        """Build one report row for a configured pool.

        The row starts as "configured" and is upgraded/downgraded through a
        status ladder: missing RPC -> "rpc_missing"; placeholder address or
        scaffold note -> placeholder statuses; unresolved addresses ->
        "address_resolution_missing"; otherwise a live balance read is
        attempted and the status reflects the outcome.
        """
        base_symbol = pool.get("base") or pool.get("tokenIn")
        quote_symbol = pool.get("quote") or pool.get("tokenOut")
        base_address = self.resolve_token_address(chain_id, chain_data, base_symbol) if base_symbol else None
        quote_address = self.resolve_token_address(chain_id, chain_data, quote_symbol) if quote_symbol else None
        venue = pool.get("venue") or ("uniswap_v2_pair" if list_key == "uniswapV2Pools" else "dodo_pmm")
        notes = list(pool.get("notes", []))
        live_enabled = pool.get("publicRoutingEnabled")
        row: dict[str, Any] = {
            "chainId": int(chain_id),
            "network": chain_data["name"],
            "inventoryGroup": list_key,
            "venue": venue,
            "baseSymbol": base_symbol,
            "baseAddress": base_address,
            "quoteSymbol": quote_symbol,
            "quoteAddress": quote_address,
            "poolAddress": pool.get("poolAddress"),
            "feeBps": pool.get("feeBps"),
            "k": pool.get("k"),
            "role": pool.get("role"),
            "poolType": pool.get("poolType"),
            "publicRoutingEnabled": live_enabled,
            "notes": notes,
            "status": "configured",
            "balances": None,
            "queryError": None,
            "balanceSource": None,
        }
        # Uniswap V2 entries carry extra factory/router metadata.
        if list_key == "uniswapV2Pools":
            row["factoryAddress"] = pool.get("factoryAddress")
            row["routerAddress"] = pool.get("routerAddress")
        pool_address = pool.get("poolAddress")
        if not rpc:
            row["status"] = "rpc_missing"
            return row
        if is_placeholder_address(pool_address) or any(note in PLACEHOLDER_NOTES for note in notes):
            row["status"] = "planned_gas_placeholder" if list_key == "gasPmmPools" else "placeholder_not_live"
            return row
        if not pool_address or not base_address or not quote_address:
            row["status"] = "address_resolution_missing"
            return row
        try:
            if list_key == "uniswapV2Pools":
                # V2 pairs rely on the pre-computed discovery report rather
                # than a direct on-chain read.
                balances = self.fetch_uniswap_v2_discovery_balances(int(chain_id), pool_address, base_symbol, quote_symbol, base_address, quote_address, rpc)
                if balances is not None:
                    row["balances"] = balances
                    row["status"] = "live"
                    row["balanceSource"] = "uniswap_v2_live_pair_discovery"
                else:
                    row["status"] = "configured_no_live_balance"
            elif chain_id == "138":
                # Chain 138 pools are trusted to respond; a failure here falls
                # through to the outer except handler.
                row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
                row["status"] = "live"
                row["balanceSource"] = "rpc_getVaultReserve"
            else:
                # Other chains: verify deployed code first, then prefer the
                # documented-live shortcut before attempting a reserve read.
                code = cast_code(rpc, pool_address)
                if code in ("", "0x"):
                    row["status"] = "configured_no_code"
                elif list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
                    row["status"] = "live_documented_no_balance"
                    row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
                else:
                    try:
                        row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
                        row["status"] = "live"
                        row["balanceSource"] = "rpc_getVaultReserve"
                    except Exception:
                        # Code exists but getVaultReserve() is unreadable —
                        # likely a non-DODO contract at this address.
                        row["status"] = "configured_code_present_no_balance_read"
        except Exception as exc:
            # A query failure still counts as documented-live for pmmPools
            # pairs listed in the gap report; the error is recorded either way.
            if list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
                row["status"] = "live_documented_no_balance"
                row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
                row["queryError"] = str(exc)
            else:
                row["status"] = "query_failed"
                row["queryError"] = str(exc)
        return row

    def build_reference_venue_row(self, chain_id: str, chain_data: dict[str, Any], venue: dict[str, Any]) -> dict[str, Any]:
        """Build one report row for a gas reference venue (no balance reads).

        Status is derived purely from config flags: placeholder note ->
        "planned_reference_placeholder", then live -> supported -> unsupported.
        """
        base_symbol = venue.get("base")
        quote_symbol = venue.get("quote")
        notes = list(venue.get("notes", []))
        if any(note in PLACEHOLDER_NOTES for note in notes):
            status = "planned_reference_placeholder"
        elif venue.get("live"):
            status = "live"
        elif venue.get("supported"):
            status = "supported_not_live"
        else:
            status = "unsupported"
        return {
            "chainId": int(chain_id),
            "network": chain_data["name"],
            "protocol": venue.get("protocol"),
            "familyKey": venue.get("familyKey"),
            "baseSymbol": base_symbol,
            "baseAddress": self.resolve_token_address(chain_id, chain_data, base_symbol) if base_symbol else None,
            "quoteSymbol": quote_symbol,
            "quoteAddress": self.resolve_token_address(chain_id, chain_data, quote_symbol) if quote_symbol else None,
            "venueAddress": venue.get("venueAddress"),
            "supported": venue.get("supported"),
            "live": venue.get("live"),
            "routingVisible": venue.get("routingVisible"),
            "reference": venue.get("reference"),
            "aggregatorOnly": venue.get("aggregatorOnly"),
            "indexRequired": venue.get("indexRequired"),
            "dependsOn": venue.get("dependsOn", []),
            "notes": notes,
            "status": status,
        }

    def fetch_uniswap_v2_discovery_balances(
        self,
        chain_id: int,
        pool_address: str,
        base_symbol: str,
        quote_symbol: str,
        base_address: str,
        quote_address: str,
        rpc: str,
    ) -> dict[str, Any] | None:
        """Return reserve balances from the discovery report, or None.

        None means the pair is absent from the report or not marked live.
        Raw reserves come from the report's health payload; decimals are
        still read over RPC (cached) for formatting fallback.
        """
        pair = self.univ2_health_index.get((chain_id, pool_address.lower()))
        if not pair or not pair.get("live"):
            return None
        health = pair.get("health") or {}
        base_raw = int(health["baseReserveRaw"])
        quote_raw = int(health["quoteReserveRaw"])
        base_dec = self.token_decimals(rpc, base_address, str(chain_id), base_symbol)
        quote_dec = self.token_decimals(rpc, quote_address, str(chain_id), quote_symbol)
        return {
            "base": {
                "symbol": base_symbol,
                "address": base_address,
                "raw": str(base_raw),
                "decimals": base_dec,
                # Prefer the report's pre-formatted units when present.
                "formatted": health.get("baseReserveUnits") or human_amount(base_raw, base_dec),
            },
            "quote": {
                "symbol": quote_symbol,
                "address": quote_address,
                "raw": str(quote_raw),
                "decimals": quote_dec,
                "formatted": health.get("quoteReserveUnits") or human_amount(quote_raw, quote_dec),
            },
            "health": health,
        }

    def fetch_dodo_balances(
        self,
        rpc: str,
        chain_id: str,
        pool_address: str,
        base_symbol: str,
        quote_symbol: str,
        base_address: str,
        quote_address: str,
    ) -> dict[str, Any]:
        """Read DODO PMM vault reserves via getVaultReserve() over RPC.

        Raises whatever cast_call raises when the pool does not respond.
        """
        lines = cast_call(rpc, pool_address, "getVaultReserve()(uint256,uint256)")
        base_raw = clean_int(lines[0])
        quote_raw = clean_int(lines[1])
        base_dec = self.token_decimals(rpc, base_address, chain_id, base_symbol)
        quote_dec = self.token_decimals(rpc, quote_address, chain_id, quote_symbol)
        return {
            "base": {
                "symbol": base_symbol,
                "address": base_address,
                "raw": str(base_raw),
                "decimals": base_dec,
                "formatted": human_amount(base_raw, base_dec),
            },
            "quote": {
                "symbol": quote_symbol,
                "address": quote_address,
                "raw": str(quote_raw),
                "decimals": quote_dec,
                "formatted": human_amount(quote_raw, quote_dec),
            },
        }

    def fetch_uniswap_v2_balances(
        self,
        rpc: str,
        chain_id: str,
        pool_address: str,
        base_symbol: str,
        quote_symbol: str,
        base_address: str,
        quote_address: str,
    ) -> dict[str, Any]:
        """Read reserves directly from a Uniswap V2 pair over RPC.

        Orients reserve0/reserve1 to base/quote by matching token0/token1
        against the expected addresses; raises on a mismatch.

        NOTE(review): not called anywhere in this file — build_pool_row uses
        fetch_uniswap_v2_discovery_balances instead. Possibly kept as a
        direct-read fallback; confirm before removing.
        """
        reserves = cast_call(rpc, pool_address, "getReserves()(uint112,uint112,uint32)")
        reserve0 = clean_int(reserves[0])
        reserve1 = clean_int(reserves[1])
        token0 = cast_call(rpc, pool_address, "token0()(address)")[0].split()[0]
        token1 = cast_call(rpc, pool_address, "token1()(address)")[0].split()[0]
        if token0.lower() == base_address.lower() and token1.lower() == quote_address.lower():
            base_raw, quote_raw = reserve0, reserve1
        elif token0.lower() == quote_address.lower() and token1.lower() == base_address.lower():
            base_raw, quote_raw = reserve1, reserve0
        else:
            raise RuntimeError(f"pair token mismatch: token0={token0} token1={token1}")
        base_dec = self.token_decimals(rpc, base_address, chain_id, base_symbol)
        quote_dec = self.token_decimals(rpc, quote_address, chain_id, quote_symbol)
        return {
            "base": {
                "symbol": base_symbol,
                "address": base_address,
                "raw": str(base_raw),
                "decimals": base_dec,
                "formatted": human_amount(base_raw, base_dec),
            },
            "quote": {
                "symbol": quote_symbol,
                "address": quote_address,
                "raw": str(quote_raw),
                "decimals": quote_dec,
                "formatted": human_amount(quote_raw, quote_dec),
            },
        }
def render_markdown(report: dict[str, Any]) -> str:
    """Render the JSON report as the human-readable master-map document.

    Emits a header, a per-network summary table, a pools table and a
    reference-venues table per chain, and a source-file footer.

    Fix: previously a chain with zero pools hit a `continue` that skipped the
    rest of its section, silently dropping its reference-venue table. The
    reference venues now render regardless of pool count; output for chains
    that have pools is unchanged.
    """
    generated_at = report["generatedAt"]
    lines = [
        "# Liquidity Pools Master Map — Chain 138, ALL Mainnet, and Public Networks",
        "",
        f"**Last Updated:** {generated_at}",
        "**Document Version:** 2.0",
        "**Status:** Generated from deployment-status plus live reserve reads where supported",
        "",
        "## Overview",
        "",
        "This document is generated from the machine-readable deployment graph in `cross-chain-pmm-lps/config/deployment-status.json`, plus live reserve reads from chain RPCs.",
        "",
        "Pool categories:",
        "- `pmmPools`: standard DODO PMM routing pools",
        "- `pmmPoolsVolatile`: volatile-route PMM pools",
        "- `gasPmmPools`: gas-family PMM pools",
        "- `uniswapV2Pools`: Uniswap V2-compatible pairs",
        "",
    ]
    # Per-network summary counts.
    summary_rows: list[list[str]] = []
    for chain in report["chains"]:
        live = sum(1 for pool in chain["pools"] if str(pool["status"]).startswith("live"))
        planned_placeholder = sum(1 for pool in chain["pools"] if pool["status"] == "planned_gas_placeholder")
        failed = sum(1 for pool in chain["pools"] if pool["status"] == "query_failed")
        summary_rows.append(
            [
                str(chain["chainId"]),
                chain["network"],
                str(chain["poolCount"]),
                str(live),
                str(planned_placeholder),
                str(failed),
                str(chain.get("referenceVenueCount", 0)),
                chain["rpc"] or "n/a",
            ]
        )
    lines += ["## Network Summary", "", markdown_table(["ChainID", "Network", "Pools", "Live Read", "Planned Gas Placeholder", "Query Failed", "Reference Venues", "RPC"], summary_rows), ""]
    # Per-chain detail sections.
    for chain in report["chains"]:
        lines += [f"## {chain['network']} ({chain['chainId']})", ""]
        if not chain["pools"]:
            lines += ["No pools are listed in the canonical deployment graph for this network.", ""]
        else:
            rows: list[list[str]] = []
            for pool in chain["pools"]:
                balances = pool.get("balances") or {}
                base_bal = balances.get("base", {}).get("formatted") if balances else None
                quote_bal = balances.get("quote", {}).get("formatted") if balances else None
                rows.append(
                    [
                        pool["inventoryGroup"],
                        pool["venue"],
                        f"{pool['baseSymbol']} / {pool['quoteSymbol']}",
                        f"`{pool['poolAddress']}`",
                        f"`{pool['baseAddress']}`" if pool.get("baseAddress") else "",
                        f"`{pool['quoteAddress']}`" if pool.get("quoteAddress") else "",
                        base_bal or "",
                        quote_bal or "",
                        pool["status"],
                    ]
                )
            lines += [
                markdown_table(
                    ["Group", "Venue", "Pair", "Pool", "Base Token", "Quote Token", "Base Balance", "Quote Balance", "Status"],
                    rows,
                ),
                "",
            ]
        # Reference venues render even when the chain has no pools.
        ref_rows = chain.get("referenceVenues", [])
        if ref_rows:
            ref_table_rows = []
            for venue in ref_rows:
                ref_table_rows.append(
                    [
                        venue["protocol"],
                        f"{venue.get('baseSymbol')} / {venue.get('quoteSymbol')}",
                        f"`{venue['venueAddress']}`" if venue.get("venueAddress") else "",
                        "yes" if venue.get("supported") else "no",
                        "yes" if venue.get("live") else "no",
                        "yes" if venue.get("routingVisible") else "no",
                        venue["status"],
                    ]
                )
            lines += [
                f"### {chain['network']} Reference Venues",
                "",
                markdown_table(["Protocol", "Pair", "Venue Address", "Supported", "Live", "Routing Visible", "Status"], ref_table_rows),
                "",
            ]
    lines += [
        "## Source Files",
        "",
        "- `cross-chain-pmm-lps/config/deployment-status.json`",
        "- `smom-dbis-138/config/chain138-eth-pmm-pools-execution.json`",
        "- `smom-dbis-138/config/chain138-pmm-pools.json`",
        f"- `reports/status/{OUT_JSON.name}`",
        "",
    ]
    return "\n".join(lines)
def main() -> int:
    """Build the pool report and write the JSON and markdown artifacts."""
    report = PoolBuilder().build_pool_rows()
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    OUT_MD.write_text(render_markdown(report) + "\n")
    for artifact in (OUT_JSON, OUT_MD):
        print(f"Wrote {artifact.relative_to(ROOT)}")
    return 0


if __name__ == "__main__":
    sys.exit(main())