Files
proxmox/scripts/lib/promod_uniswap_v2_live_pair_discovery.py
defiQUG b8613905bd
Some checks failed
Deploy to Phoenix / validate (push) Failing after 15s
Deploy to Phoenix / deploy (push) Has been skipped
chore: sync workspace — configs, docs, scripts, CI, pnpm, submodules
- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138
- Atomic swap + cross-chain-pmm-lps-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices
- Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts

Made-with: Cursor
2026-04-21 22:01:33 -07:00

328 lines
12 KiB
Python

#!/usr/bin/env python3
from __future__ import annotations
from pathlib import Path
import argparse
from decimal import Decimal, getcontext
from functools import lru_cache
import json
import os
import re
import subprocess
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
# Generous Decimal precision so reserve math on large (uint112) raw amounts
# scaled by up to 18 token decimals never rounds.
getcontext().prec = 42

# Workspace root: two directory levels above this script's directory.
ROOT = Path(__file__).resolve().parents[2]
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
ENV_PATH = ROOT / "smom-dbis-138" / ".env"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_LIVE_PAIR_DISCOVERY.md"

# Sentinel returned by a Uniswap V2 factory's getPair when no pair exists.
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# A live pair counts as healthy when its quote/base price sits within 25 bps
# of 1:1 parity and both reserves hold at least 1000 whole token units.
HEALTHY_DEVIATION_BPS = Decimal("25")
MIN_HEALTHY_RESERVE_UNITS = Decimal("1000")
# NOTE(review): UINT_RE appears unused in this file's visible code — the
# parse_uint/parse_uints helpers split on whitespace instead. Confirm before removing.
UINT_RE = re.compile(r"\b\d+\b")
# Per-call timeout (seconds) for `cast call`, overridable via environment.
CAST_CALL_TIMEOUT_SECONDS = int(os.environ.get("PROMOD_CAST_TIMEOUT_SECONDS", "20"))

# Per-chain probe configuration keyed by decimal chain-id string.
#   rpc_keys: env var names tried in order until one resolves to an RPC URL.
#   hub:      preferred stable anchor symbol; NOTE(review) — appears
#             informational only: candidate_pairs() re-derives the hub from
#             the chain's anchorAddresses, and build_chain_entry() reads only
#             "rpc_keys". Confirm against the rest of the file.
CHAIN_CONFIG = {
    "1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"], "hub": "USDC"},
    "10": {"rpc_keys": ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"], "hub": "USDC"},
    "25": {"rpc_keys": ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"], "hub": "USDT"},
    "56": {"rpc_keys": ["BSC_RPC_URL", "BSC_MAINNET_RPC"], "hub": "USDT"},
    "100": {"rpc_keys": ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"], "hub": "USDC"},
    "137": {"rpc_keys": ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"], "hub": "USDC"},
    "42220": {"rpc_keys": ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"], "hub": "USDC"},
    "43114": {"rpc_keys": ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"], "hub": "USDC"},
    "8453": {"rpc_keys": ["BASE_RPC_URL", "BASE_MAINNET_RPC"], "hub": "USDC"},
    "42161": {"rpc_keys": ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"], "hub": "USDC"},
}
def now() -> str:
    """Return the current UTC time as an ISO-8601 ``Z``-suffixed string (second precision)."""
    utc_now = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc_now)
def load_json(path: Path):
    """Parse the JSON document stored at *path* and return the decoded object."""
    with path.open() as handle:
        return json.load(handle)
def write_json(path: Path, payload):
    """Serialize *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, indent=2)
    path.write_text(serialized + "\n")
def write_text(path: Path, text: str):
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    normalized = text.rstrip()
    path.write_text(f"{normalized}\n")
def load_env(path: Path):
    """Parse a dotenv-style file into a dict.

    Blank lines, ``#`` comments, and lines without ``=`` are skipped;
    keys and values are whitespace-stripped. Later keys overwrite earlier ones.
    """
    values: dict[str, str] = {}
    for raw in path.read_text().splitlines():
        stripped = raw.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        values[key.strip()] = value.strip()
    return values
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
if seen is None:
seen = set()
if key in seen:
return os.environ.get(key, env_values.get(key, ""))
seen.add(key)
value = os.environ.get(key, env_values.get(key, ""))
if value.startswith("${") and value.endswith("}"):
inner = value[2:-1]
target = inner.split(":-", 1)[0]
return resolve_env_value(target, env_values, seen)
return value
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run ``cast call`` against *target* on *rpc_url* and return stripped stdout.

    Raises subprocess.CalledProcessError on non-zero exit and
    subprocess.TimeoutExpired after CAST_CALL_TIMEOUT_SECONDS.
    """
    command = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
    output = subprocess.check_output(command, text=True, timeout=CAST_CALL_TIMEOUT_SECONDS)
    return output.strip()
@lru_cache(maxsize=1024)
def cast_call_cached(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Memoized cast_call: identical argument tuples hit the RPC only once per run."""
    result = cast_call(rpc_url, target, signature, *args)
    return result
def parse_uint(value: str) -> int:
    """Parse the first whitespace-delimited token of *value* as an integer.

    Raises ValueError when *value* is blank (or the token is not an integer).
    """
    text = value.strip()
    if not text:
        raise ValueError(f"could not parse integer from {value!r}")
    first_token = text.split()[0]
    return int(first_token)
def parse_uints(value: str, count: int) -> list[int]:
    """Extract the first integer from each non-blank line of *value*.

    Stops once *count* integers are collected; raises ValueError when
    fewer than *count* are found.
    """
    numbers: list[int] = []
    for raw in value.splitlines():
        candidate = raw.strip()
        if not candidate:
            continue
        numbers.append(int(candidate.split()[0]))
        if len(numbers) == count:
            break
    if len(numbers) < count:
        raise ValueError(f"expected {count} integers, got {value!r}")
    return numbers[:count]
def parse_address(value: str) -> str:
    """Return the first ``0x``-prefixed 40-hex-digit address found in *value*.

    Raises ValueError when no address is present.
    """
    found = re.search(r"0x[a-fA-F0-9]{40}", value)
    if found is None:
        raise ValueError(f"could not parse address from {value!r}")
    return found.group(0)
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Convert a raw integer token amount into whole units by shifting *decimals* places."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
def compute_pair_health(rpc_url: str, pair_address: str, base_address: str, quote_address: str) -> dict:
    """Read on-chain reserves for a Uniswap V2 pair and score its health.

    A pair is healthy when both reserves reach MIN_HEALTHY_RESERVE_UNITS
    and the quote-per-base price deviates from 1:1 parity by no more than
    HEALTHY_DEVIATION_BPS. Raises ValueError when the pair's tokens do not
    match *base_address*/*quote_address* in either order.
    """
    token0 = parse_address(cast_call_cached(rpc_url, pair_address, "token0()(address)"))
    token1 = parse_address(cast_call_cached(rpc_url, pair_address, "token1()(address)"))
    reserves = parse_uints(
        cast_call_cached(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3
    )
    reserve0_raw, reserve1_raw = reserves[0], reserves[1]  # third value is the reserve timestamp
    decimals0 = parse_uint(cast_call_cached(rpc_url, token0, "decimals()(uint8)"))
    decimals1 = parse_uint(cast_call_cached(rpc_url, token1, "decimals()(uint8)"))
    base_lower, quote_lower = base_address.lower(), quote_address.lower()
    # Map the pair's token ordering onto our base/quote orientation.
    if (token0.lower(), token1.lower()) == (base_lower, quote_lower):
        base_raw, base_decimals = reserve0_raw, decimals0
        quote_raw, quote_decimals = reserve1_raw, decimals1
    elif (token0.lower(), token1.lower()) == (quote_lower, base_lower):
        base_raw, base_decimals = reserve1_raw, decimals1
        quote_raw, quote_decimals = reserve0_raw, decimals0
    else:
        raise ValueError(f"pair tokens {token0}/{token1} do not match {base_address}/{quote_address}")
    base_units = normalize_units(base_raw, base_decimals)
    quote_units = normalize_units(quote_raw, quote_decimals)
    # An empty base side reports price 0 instead of dividing by zero.
    price = quote_units / base_units if base_units != 0 else Decimal(0)
    deviation_bps = abs(price - Decimal(1)) * Decimal(10000)
    depth_ok = min(base_units, quote_units) >= MIN_HEALTHY_RESERVE_UNITS
    parity_ok = deviation_bps <= HEALTHY_DEVIATION_BPS
    return {
        "baseReserveRaw": str(base_raw),
        "quoteReserveRaw": str(quote_raw),
        "baseReserveUnits": str(base_units),
        "quoteReserveUnits": str(quote_units),
        "priceQuotePerBase": str(price),
        "deviationBps": str(deviation_bps),
        "depthOk": depth_ok,
        "parityOk": parity_ok,
        "healthy": depth_ok and parity_ok,
    }
def candidate_pairs(chain: dict) -> list[tuple[str, str, str, str]]:
    """Enumerate (base, quote, base_addr, quote_addr) pair candidates for *chain*.

    Candidates are each present cW token against the chain's stable hub anchor
    (USDC preferred, else USDT), followed by the fixed cW-to-cW cross pairs.
    Missing tokens/anchors simply drop the corresponding candidates.
    """
    cw_tokens = chain.get("cwTokens", {})
    anchors = chain.get("anchorAddresses", {})
    hub = "USDC" if "USDC" in anchors else "USDT"
    candidates: list[tuple[str, str, str, str]] = []
    if hub in anchors:
        for base in ("cWUSDC", "cWUSDT", "cWAUSDT"):
            if base in cw_tokens:
                candidates.append((base, hub, cw_tokens[base], anchors[hub]))
    for base, quote in (("cWUSDT", "cWUSDC"), ("cWAUSDT", "cWUSDT"), ("cWAUSDT", "cWUSDC")):
        if base in cw_tokens and quote in cw_tokens:
            candidates.append((base, quote, cw_tokens[base], cw_tokens[quote]))
    return candidates
def append_discovered_pair(status: dict, chain_id: str, pair: dict):
    """Add *pair* to the chain's ``uniswapV2Pools`` list unless its poolAddress
    is already present (case-insensitive). Returns True when appended.
    """
    pools = status["chains"][chain_id].setdefault("uniswapV2Pools", [])
    wanted = pair["poolAddress"].lower()
    for existing in pools:
        if str(existing.get("poolAddress", "")).lower() == wanted:
            return False
    pools.append(pair)
    return True
def build_chain_entry(chain_id: str, chain: dict, config: dict, env_values: dict[str, str]) -> tuple[dict, list[dict]]:
    """Probe one chain's Uniswap V2 factory for candidate pairs.

    Returns ``(entry, discovered_rows)``: *entry* is the report row for this
    chain (env readiness, factory/router, per-pair results); *discovered_rows*
    holds one record per live pair, shaped for deployment-status.json's
    ``uniswapV2Pools`` list. When the env is not ready, no RPC calls are made.
    """
    # Per-chain env vars name the factory/router and an optional start block.
    factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
    router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
    start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
    rpc_url = ""
    # First configured RPC key that resolves to a non-empty value wins.
    for key in config["rpc_keys"]:
        value = resolve_env_value(key, env_values)
        if value:
            rpc_url = value
            break
    env_ready = bool(factory and router and rpc_url)
    pairs = []
    discovered_rows = []
    if env_ready:
        for base, quote, token0, token1 in candidate_pairs(chain):
            try:
                pair_address = cast_call_cached(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
            except Exception as exc:
                # Keep probing remaining pairs; record the failure inline.
                pair_address = f"ERROR:{exc}"
            # getPair returns the zero address when no pair exists.
            live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
            row = {
                "base": base,
                "quote": quote,
                "poolAddress": pair_address,
                "live": live,
            }
            if live:
                try:
                    row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
                except Exception as exc:
                    # A failed health probe marks the pair unhealthy but keeps it listed.
                    row["health"] = {"healthy": False, "error": str(exc)}
                discovered_rows.append(
                    {
                        "chain_id": chain_id,
                        "row": {
                            "base": base,
                            "quote": quote,
                            "poolAddress": pair_address,
                            "factoryAddress": factory,
                            "routerAddress": router,
                            "startBlock": int(start_block),
                            "venue": "uniswap_v2_pair",
                            "publicRoutingEnabled": False,
                        },
                    }
                )
            pairs.append(row)
    entry = {
        "chain_id": int(chain_id),
        "network": chain.get("name"),
        "factoryAddress": factory or None,
        "routerAddress": router or None,
        "startBlock": int(start_block),
        "rpcConfigured": bool(rpc_url),
        "envReady": env_ready,
        "pairsChecked": pairs,
    }
    return entry, discovered_rows
def main() -> None:
    """CLI entry point: probe every configured chain concurrently, then emit
    the JSON report and the markdown summary doc.

    With ``--write-discovered``, newly discovered live pairs are also appended
    to deployment-status.json under each chain's ``uniswapV2Pools`` list.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--write-discovered", action="store_true", help="Write discovered live pairs into deployment-status.json under uniswapV2Pools.")
    args = parser.parse_args()
    status = load_json(DEPLOYMENT_STATUS)
    env_values = load_env(ENV_PATH)
    entries_by_chain: dict[str, dict] = {}
    discovered_for_write = []
    jobs = []
    # Fan out one probe job per chain present in both CHAIN_CONFIG and the
    # deployment status file; chains absent from the status file are skipped.
    with ThreadPoolExecutor(max_workers=min(8, len(CHAIN_CONFIG))) as executor:
        for chain_id, config in CHAIN_CONFIG.items():
            chain = status["chains"].get(chain_id)
            if not chain:
                continue
            jobs.append((chain_id, executor.submit(build_chain_entry, chain_id, chain, config, env_values)))
        for chain_id, future in jobs:
            entry, discovered_rows = future.result()
            entries_by_chain[chain_id] = entry
            discovered_for_write.extend(discovered_rows)
    # Preserve CHAIN_CONFIG's declaration order in the report.
    entries = [entries_by_chain[chain_id] for chain_id in CHAIN_CONFIG if chain_id in entries_by_chain]
    writes = []
    if args.write_discovered:
        for item in discovered_for_write:
            changed = append_discovered_pair(status, item["chain_id"], item["row"])
            if changed:
                writes.append(item["row"])
    # Rewrite deployment-status.json only when something actually changed.
    if writes:
        DEPLOYMENT_STATUS.write_text(json.dumps(status, indent=2) + "\n")
    payload = {
        "generated_at": now(),
        "write_discovered": args.write_discovered,
        "discovered_live_pair_count": len(discovered_for_write),
        "healthy_live_pair_count": sum(
            1 for entry in entries for row in entry["pairsChecked"] if row.get("health", {}).get("healthy") is True
        ),
        "writes_applied": writes,
        "entries": entries,
    }
    write_json(REPORT, payload)
    # Human-readable markdown summary mirroring the JSON report.
    lines = [
        "# Mr. Promod Uniswap V2 Live Pair Discovery",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Live pairs discovered: `{payload['discovered_live_pair_count']}`",
        f"- Healthy live pairs: `{payload['healthy_live_pair_count']}`",
        f"- Write mode: `{payload['write_discovered']}`",
        "",
        "| Chain | Network | Env Ready | Live Pairs Found | Healthy Live Pairs |",
        "|---|---|---|---|---|",
    ]
    for entry in entries:
        live_pairs = [f"`{row['base']}/{row['quote']}`" for row in entry["pairsChecked"] if row["live"]]
        healthy_pairs = [
            f"`{row['base']}/{row['quote']}`"
            for row in entry["pairsChecked"]
            if row.get("health", {}).get("healthy") is True
        ]
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['envReady']}` | {', '.join(live_pairs) if live_pairs else ''} | {', '.join(healthy_pairs) if healthy_pairs else ''} |"
        )
    write_text(DOC, "\n".join(lines))
    # Print the report path so callers/CI can locate the artifact.
    print(REPORT)
# Standard script guard: run only when executed directly, not on import.
if __name__ == "__main__":
    main()