chore: sync workspace docs, configs, and submodules
All checks were successful
Deploy to Phoenix / deploy (push) Successful in 9s

This commit is contained in:
defiQUG
2026-04-18 12:07:15 -07:00
parent d63efcb315
commit 4fab998e51
182 changed files with 166285 additions and 24 deletions

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env bash
# Send a random amount between 5 and 9 ETH (inclusive) to each address in
# config/pmm-soak-wallet-grid.json (Elemental Imperium 33×33×6 matrix).
#
# Requires: cast (Foundry), jq, python3. Loads PRIVATE_KEY and RPC via load-project-env.sh.
#
# Usage:
# ./scripts/deployment/send-eth-ei-matrix-wallets.sh [--dry-run] [--limit N] [--offset N]
#
# --dry-run Print planned sends only (no transactions).
# --limit N Process at most N wallets (after offset). Default: all.
# --offset N Skip the first N wallets (resume / partial run).
#
# Gas (Chain 138 / Besu): defaults avoid stuck pending txs from near-zero EIP-1559 caps.
# Override if needed:
# EI_MATRIX_GAS_PRICE=100000000000
# EI_MATRIX_PRIORITY_GAS_PRICE=20000000000
#
# Nonces: each send uses an explicit --nonce from eth_getTransactionCount(..., "pending")
# and increments locally so --async does not race duplicate nonces.
#
# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -euo pipefail

# Anchor all relative paths at the repository root (two levels above this script).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
cd "$PROJECT_ROOT"

# CLI defaults: live sends, no limit, start at the first wallet.
DRY_RUN=false
LIMIT=""
OFFSET="0"

# Flag parsing (see the usage header above for semantics).
while (( $# > 0 )); do
  case "$1" in
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --limit)
      LIMIT="${2:?}"
      shift 2
      ;;
    --offset)
      OFFSET="${2:?}"
      shift 2
      ;;
    *)
      echo "Unknown arg: $1" >&2
      exit 1
      ;;
  esac
done
# Load PRIVATE_KEY / RPC settings from the project env loader.
# shellcheck disable=SC1091
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"

# Single-instance guard: FD 200 holds a non-blocking flock for the lifetime of
# the script, so concurrent runs cannot race the same nonce sequence.
LOCK_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-eth-send.lock"
mkdir -p "$(dirname "$LOCK_FILE")"
exec 200>"$LOCK_FILE"
if ! flock -n 200; then
  echo "Another send-eth-ei-matrix-wallets.sh is already running (lock: $LOCK_FILE)." >&2
  exit 1
fi
# Target RPC, wallet grid, and the expected signing address.
RPC="${RPC_URL_138:-http://192.168.11.211:8545}"
GRID="$PROJECT_ROOT/config/pmm-soak-wallet-grid.json"
DEPLOYER_CANONICAL="0x4A666F96fC8764181194447A7dFdb7d471b301C8"
# Wei per gas — must exceed stuck-replacement threshold on busy pools (see script header).
EI_MATRIX_GAS_PRICE="${EI_MATRIX_GAS_PRICE:-100000000000}"
EI_MATRIX_PRIORITY_GAS_PRICE="${EI_MATRIX_PRIORITY_GAS_PRICE:-20000000000}"

# Preconditions: grid file, required tooling (header lists cast, jq, python3),
# and a signing key. python3 check added — random_wei depends on it.
[[ -f "$GRID" ]] || { echo "Missing $GRID" >&2; exit 1; }
command -v cast &>/dev/null || { echo "cast (Foundry) required" >&2; exit 1; }
command -v jq &>/dev/null || { echo "jq required" >&2; exit 1; }
command -v python3 &>/dev/null || { echo "python3 required" >&2; exit 1; }
[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY not set (source smom-dbis-138/.env or root .env)" >&2; exit 1; }
FROM_ADDR=$(cast wallet address --private-key "$PRIVATE_KEY")

echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# Fixed banner text: previously said "random 59 ETH"; amounts are random 5-9
# ETH per wallet (see random_wei and the script header).
echo "EI matrix ETH distribution (random 5-9 ETH per wallet)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "RPC: $RPC"
echo "Signer: $FROM_ADDR"
echo "Grid: $GRID"
echo "Dry-run: $DRY_RUN"
echo "Offset: $OFFSET Limit: ${LIMIT:-all}"
echo "Gas: maxFee=$EI_MATRIX_GAS_PRICE wei priorityFee=$EI_MATRIX_PRIORITY_GAS_PRICE wei"
echo ""
# Case-insensitive compare: warn (do not abort) when signing with a non-canonical key.
if [[ "${FROM_ADDR,,}" != "${DEPLOYER_CANONICAL,,}" ]]; then
  echo "[WARN] Signer is not canonical deployer $DEPLOYER_CANONICAL — continuing anyway."
  echo ""
fi
#######################################
# Read the signer's pending nonce from the node.
# Globals:   RPC (read), FROM_ADDR (read)
# Outputs:   decimal nonce on stdout
# Returns:   non-zero when the RPC call fails or yields no result
#######################################
pending_nonce() {
  local payload response result_hex
  payload="{\"jsonrpc\":\"2.0\",\"method\":\"eth_getTransactionCount\",\"params\":[\"${FROM_ADDR}\",\"pending\"],\"id\":1}"
  response=$(curl -sS -X POST "$RPC" -H "Content-Type: application/json" \
    -d "$payload" 2>/dev/null) || return 1
  result_hex=$(jq -r '.result // empty' <<<"$response")
  [[ -n "$result_hex" ]] || return 1
  cast to-dec "$result_hex"
}
#######################################
# Print a uniformly random amount in [5, 9] ETH, expressed as integer wei.
# Uses Decimal to avoid float artifacts in the wei conversion.
#######################################
random_wei() {
  python3 - <<'PY'
import random
from decimal import Decimal

print(int(Decimal(str(random.uniform(5.0, 9.0))) * 10**18))
PY
}
# Append-only log of failed sends (also echoed to stderr in the main loop).
ERR_LOG="${PROJECT_ROOT}/reports/status/ei-matrix-eth-send-failures.log"
mkdir -p "$(dirname "$ERR_LOG")"

# Seed the local nonce counter once from the node's pending count; the send
# loop then increments it locally (see the "Nonces" note in the header).
# Dry runs never touch the chain, so any placeholder value works.
if ! $DRY_RUN; then
  NONCE=$(pending_nonce) || { echo "Could not read pending nonce for $FROM_ADDR" >&2; exit 1; }
  echo "Starting nonce (pending): $NONCE"
  echo ""
else
  NONCE=0
fi
#######################################
# Emit wallet addresses from the grid, one per line, honoring OFFSET/LIMIT.
# Globals:   GRID, OFFSET, LIMIT (all read)
#######################################
stream_addresses() {
  local first_line
  first_line=$((OFFSET + 1))
  if [[ -n "${LIMIT:-}" ]]; then
    jq -r '.wallets[].address' "$GRID" | tail -n "+${first_line}" | head -n "$LIMIT"
  else
    jq -r '.wallets[].address' "$GRID" | tail -n "+${first_line}"
  fi
}
# Counters for the final summary; idx is the absolute grid index so resumed
# runs (--offset) report positions consistent with the full wallet list.
sent=0
failed=0
idx=$OFFSET
while read -r addr; do
  # Fresh random amount per wallet; eth_approx is display-only.
  wei=$(random_wei)
  eth_approx=$(python3 -c "print(f'{$wei / 1e18:.6f}')")
  if $DRY_RUN; then
    echo "[dry-run] idx=$idx $addr ${wei} wei (~${eth_approx} ETH)"
  else
    # Per-send fee caps start at the configured defaults; doubled once on a
    # "replacement underpriced" retry (attempt 2 below).
    GP="$EI_MATRIX_GAS_PRICE"
    PP="$EI_MATRIX_PRIORITY_GAS_PRICE"
    attempt=1
    while [[ "$attempt" -le 2 ]]; do
      if out=$(cast send "$addr" --value "$wei" --rpc-url "$RPC" --private-key "$PRIVATE_KEY" \
        --nonce "$NONCE" \
        --async \
        --gas-price "$GP" \
        --priority-gas-price "$PP" \
        2>&1); then
        # --async prints the tx hash; take the last line in case of banners.
        tx=$(echo "$out" | tail -n1)
        echo "[ok] idx=$idx nonce=$NONCE $addr ${eth_approx} ETH tx=$tx"
        sent=$((sent + 1))
        # Increment the nonce locally: with --async we must not re-query the
        # node per send, or duplicate nonces could be handed back.
        NONCE=$((NONCE + 1))
        # Progress checkpoint for resuming via --offset.
        echo "$idx" > "${PROJECT_ROOT}/reports/status/ei-matrix-eth-send-last-idx.txt"
        break
      fi
      # Single retry path: bump fees 2x and re-sync the nonce from the node.
      if echo "$out" | grep -q "Replacement transaction underpriced" && [[ "$attempt" -eq 1 ]]; then
        GP=$((GP * 2))
        PP=$((PP * 2))
        NONCE=$(pending_nonce) || true
        attempt=$((attempt + 1))
        continue
      fi
      # Hard failure: log, re-sync the nonce so later sends don't sit behind a
      # gap, and continue with the next wallet.
      echo "[fail] idx=$idx nonce=$NONCE $addr $out" | tee -a "$ERR_LOG" >&2
      failed=$((failed + 1))
      NONCE=$(pending_nonce) || true
      break
    done
  fi
  idx=$((idx + 1))
done < <(stream_addresses)
echo ""
# Final one-line summary for the operator.
if [[ "$DRY_RUN" == true ]]; then
  echo "Dry-run complete (no transactions sent). Indices covered: $OFFSET..$((idx - 1))."
else
  echo "Done. Sent: $sent Failed: $failed"
fi

View File

@@ -0,0 +1,131 @@
#!/usr/bin/env python3
"""
Apply Elemental Imperium matrix labels to config/pmm-soak-wallet-grid.json.
Each wallet gains:
cellId — immutable coordinate label (EI-L##-B##-C#)
networkCode — from registry default, overlay rangeRules, then cellOverrides
asn — same layering; null until known
label — {networkCode}.{cellId}[.AS{n}]
Run from repo root:
python3 scripts/lib/apply_elemental_imperium_wallet_labels.py
Options:
--network-code CODE Base networkCode for all wallets (before overlay).
--asn N Base ASN for all wallets (before overlay).
--overlay PATH JSON overlay (see config/elemental-imperium-wallet-overlay.example.json)
"""
from __future__ import annotations
import argparse
import json
import sys
from pathlib import Path
# Make this script's own directory importable so the sibling helper module
# (elemental_imperium_wallet_common) resolves regardless of the caller's CWD.
_REPO_LIB = Path(__file__).resolve().parent
if str(_REPO_LIB) not in sys.path:
    sys.path.insert(0, str(_REPO_LIB))
from elemental_imperium_wallet_common import ( # noqa: E402
build_label,
cell_id,
resolve_network_asn,
)
def load_registry(repo_root: Path) -> tuple[str, dict]:
    """Read the EI network registry and return (defaultNetworkCode, registry dict).

    Falls back to "unassigned" when the registry declares no default code.
    """
    registry_path = repo_root / "config" / "elemental-imperium-network-registry.json"
    registry = json.loads(registry_path.read_text(encoding="utf-8"))
    return registry.get("defaultNetworkCode", "unassigned"), registry
def load_overlay(path: Path) -> dict:
    """Parse an overlay JSON file; abort unless the top-level value is an object."""
    overlay = json.loads(path.read_text(encoding="utf-8"))
    if isinstance(overlay, dict):
        return overlay
    raise SystemExit("Overlay must be a JSON object")
def main() -> None:
    """Label every wallet in the grid with cellId / networkCode / asn / label."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--network-code",
        dest="network_code",
        default=None,
        help="Base networkCode for every wallet (before overlay)",
    )
    parser.add_argument(
        "--asn",
        type=int,
        default=None,
        help="Base ASN for every wallet (before overlay)",
    )
    parser.add_argument(
        "--overlay",
        type=Path,
        default=None,
        help="Path to overlay JSON (rangeRules + cellOverrides)",
    )
    args = parser.parse_args()
    repo_root = Path(__file__).resolve().parents[2]
    grid_path = repo_root / "config" / "pmm-soak-wallet-grid.json"
    default_net, registry = load_registry(repo_root)
    # CLI-supplied base network wins over the registry default.
    base_network = args.network_code or default_net
    base_asn = args.asn
    overlay: dict | None = None
    overlay_path_str: str | None = None
    if args.overlay:
        overlay = load_overlay(args.overlay.resolve())
        overlay_path_str = str(args.overlay)
    data = json.loads(grid_path.read_text(encoding="utf-8"))
    wallets = data.get("wallets")
    if not isinstance(wallets, list):
        raise SystemExit("Invalid grid: missing wallets array")
    spec = registry.get("labelSpec", {})
    # Provenance block written next to the wallets so a labeling run is
    # reproducible from the recorded inputs.
    labeling = {
        "specId": spec.get("id", "ei-wallet-label-v1"),
        "registryPath": "config/elemental-imperium-network-registry.json",
        "defaultNetworkCode": default_net,
        "baseNetworkCode": base_network,
        "baseAsn": base_asn,
        "overlayPath": overlay_path_str,
        "cellIdFormat": spec.get("cellIdFormat", "EI-L{lpbca:02d}-B{branch:02d}-C{class}"),
        "labelFormats": {
            "default": spec.get("labelFormatDefault", "{networkCode}.{cellId}"),
            "withAsn": spec.get("labelFormatWithAsn", "{networkCode}.{cellId}.AS{asn}"),
        },
    }
    # Annotate every wallet in place: coordinate id, then layered network/asn.
    for w in wallets:
        lpbca = int(w["lpbca"])
        branch = int(w["branch"])
        class_ = int(w["class"])
        cid = cell_id(lpbca, branch, class_)
        w["cellId"] = cid
        net, asn = resolve_network_asn(w, cid, base_network, base_asn, overlay)
        w["networkCode"] = net
        w["asn"] = asn
        w["label"] = build_label(net, cid, asn)
    # Bump schema version to at least 2 once labels are present.
    ver = max(int(data.get("version", 1)), 2)
    out = {
        "version": ver,
        "dimensions": data["dimensions"],
        "derivation": data["derivation"],
        "labeling": labeling,
        "wallets": wallets,
    }
    grid_path.write_text(json.dumps(out, indent=2, sort_keys=False) + "\n", encoding="utf-8")
    print(f"Wrote {len(wallets)} wallets to {grid_path}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,279 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import os
from pathlib import Path
# Repository-anchored input/output paths for the checklist generator.
REPO_ROOT = Path(__file__).resolve().parents[2]
DEPLOYMENT_STATUS_PATH = REPO_ROOT / "cross-chain-pmm-lps/config/deployment-status.json"
DOC_PATH = REPO_ROOT / "docs/03-deployment/CHAIN_138_TO_WEMIX_1111_BRIDGE_COMPLETION_CHECKLIST.md"
JSON_PATH = REPO_ROOT / "reports/extraction/chain-138-to-wemix-1111-bridge-completion-checklist-latest.json"
def load_env(repo_root: Path) -> None:
    """Best-effort .env loader: values already in the environment win (setdefault)."""
    candidates = (repo_root / ".env", repo_root / "smom-dbis-138/.env")
    for env_path in candidates:
        if not env_path.exists():
            continue
        for raw in env_path.read_text().splitlines():
            entry = raw.strip()
            # Skip blanks, comments, and anything not shaped like KEY=VALUE.
            if not entry or entry.startswith("#") or "=" not in entry:
                continue
            key, _, value = entry.partition("=")
            os.environ.setdefault(key.strip(), value.strip().strip("'").strip('"'))
def env_value(key: str) -> str:
    """Return the environment variable's value, or the empty string when unset."""
    value = os.environ.get(key)
    return value if value is not None else ""
def main() -> None:
    """Generate the 138 -> 1111 bridge completion checklist as JSON + Markdown."""
    # Pull .env values into the process so env_value() reflects repo config.
    load_env(REPO_ROOT)
    deployment_status = json.loads(DEPLOYMENT_STATUS_PATH.read_text())
    chain_1111 = deployment_status["chains"]["1111"]
    # Snapshot of the exact environment surface the checklist depends on.
    env_snapshot = {
        "CCIP_ROUTER_WEMIX": env_value("CCIP_ROUTER_WEMIX"),
        "LINK_TOKEN_WEMIX": env_value("LINK_TOKEN_WEMIX"),
        "WETH9_WEMIX": env_value("WETH9_WEMIX"),
        "WETH10_WEMIX": env_value("WETH10_WEMIX"),
        "CCIPWETH9_BRIDGE_WEMIX": env_value("CCIPWETH9_BRIDGE_WEMIX"),
        "CCIPWETH10_BRIDGE_WEMIX": env_value("CCIPWETH10_BRIDGE_WEMIX"),
        "LINK_GAS_AVAILABLE_1111": env_value("LINK_GAS_AVAILABLE_1111"),
        "LINK_GAS_UNAVAILABLE_CHAIN_IDS": env_value("LINK_GAS_UNAVAILABLE_CHAIN_IDS"),
        "WEMIX_RPC": env_value("WEMIX_RPC"),
        "CHAIN138_SELECTOR": env_value("CHAIN138_SELECTOR"),
        "CCIPWETH9_BRIDGE_CHAIN138": env_value("CCIPWETH9_BRIDGE_CHAIN138"),
        "CCIPWETH10_BRIDGE_CHAIN138": env_value("CCIPWETH10_BRIDGE_CHAIN138"),
    }
    # CCIP selector for chain 1111 (Wemix).
    selector_1111 = "5142893604156789321"
    # Machine-readable checklist; the Markdown below is derived from the same data.
    checklist = {
        "title": "Chain 138 -> Wemix 1111 Bridge Completion Checklist",
        "last_updated": "2026-04-18",
        "source_chain_id": 138,
        "destination_chain_id": 1111,
        "destination_selector": selector_1111,
        "current_repo_state": {
            "activation_state": chain_1111["activationState"],
            "bridge_available": chain_1111["bridgeAvailable"],
            "cw_tokens": chain_1111.get("cwTokens", {}),
            "anchor_addresses": chain_1111.get("anchorAddresses", {}),
            "gas_mirrors": chain_1111.get("gasMirrors", {}),
            "gas_quote_addresses": chain_1111.get("gasQuoteAddresses", {}),
        },
        "environment": env_snapshot,
        "promotion_conditions": [
            "CCIPWETH9_BRIDGE_WEMIX and CCIPWETH10_BRIDGE_WEMIX are deployed and recorded in smom-dbis-138/.env",
            "Wemix bridges include CHAIN138_SELECTOR as a destination and Chain 138 bridges include selector 5142893604156789321 as a destination",
            "LINK_GAS_AVAILABLE_1111 is non-zero and both WEMIX bridges hold operational LINK balances",
            "A small WETH9 or WETH10 test transfer succeeds from Chain 138 to 1111 and back",
            "GRU v2 asset rollout prerequisites are satisfied for the specific cW* assets to be enabled on 1111",
        ],
        "steps": [
            {
                "step": 1,
                "title": "Preflight WEMIX configuration",
                "commands": [
                    "cd smom-dbis-138",
                    "./scripts/deployment/preflight-config-ready-chains.sh wemix",
                ],
                "checks": [
                    "WEMIX_RPC resolves and responds",
                    "deployer has native WEMIX gas",
                    "CCIP_ROUTER_WEMIX, LINK_TOKEN_WEMIX, WETH9_WEMIX, and WETH10_WEMIX are set",
                ],
            },
            {
                "step": 2,
                "title": "Deploy WETH bridges on chain 1111",
                "commands": [
                    "cd smom-dbis-138",
                    "./scripts/deployment/deploy-bridges-config-ready-chains.sh wemix",
                ],
                "checks": [
                    "capture deployed CCIPWETH9Bridge address into CCIPWETH9_BRIDGE_WEMIX",
                    "capture deployed CCIPWETH10Bridge address into CCIPWETH10_BRIDGE_WEMIX",
                ],
            },
            {
                "step": 3,
                "title": "Wire destinations in both directions",
                "commands": [
                    "cd smom-dbis-138",
                    "./scripts/deployment/complete-config-ready-chains.sh",
                ],
                "checks": [
                    "Chain 138 WETH9 bridge contains selector 5142893604156789321 -> CCIPWETH9_BRIDGE_WEMIX",
                    "Chain 138 WETH10 bridge contains selector 5142893604156789321 -> CCIPWETH10_BRIDGE_WEMIX",
                    "Wemix WETH9 bridge contains CHAIN138_SELECTOR -> CCIPWETH9_BRIDGE_CHAIN138",
                    "Wemix WETH10 bridge contains CHAIN138_SELECTOR -> CCIPWETH10_BRIDGE_CHAIN138",
                ],
            },
            {
                "step": 4,
                "title": "Fund LINK on WEMIX bridges",
                "commands": [
                    "cd smom-dbis-138",
                    "./scripts/deployment/check-link-balance-config-ready-chains.sh wemix",
                    "./scripts/deployment/fund-ccip-bridges-with-link.sh",
                ],
                "checks": [
                    "target at least 10 LINK per bridge",
                    "LINK_GAS_AVAILABLE_1111 flips from 0 to a positive value",
                ],
            },
            {
                "step": 5,
                "title": "Verify bridge readiness",
                "commands": [
                    "cast call \"$CCIPWETH9_BRIDGE_WEMIX\" 'getDestinationChains()(uint64[])' --rpc-url \"$WEMIX_RPC\"",
                    "cast call \"$CCIPWETH10_BRIDGE_WEMIX\" 'getDestinationChains()(uint64[])' --rpc-url \"$WEMIX_RPC\"",
                ],
                "checks": [
                    "both directions show configured selectors",
                    "small WETH9/WETH10 test transfer succeeds",
                ],
            },
            {
                "step": 6,
                "title": "Prepare GRU v2 assets for 1111",
                "commands": [],
                "checks": [
                    "confirm which cW* assets beyond cWBTC are intended for 1111",
                    "deploy or confirm destination cW* token contracts on 1111",
                    "wire bridge or mint authority for those assets",
                    "only after asset delivery is live, promote DODO PMM / Uniswap V2 / other venue rollout on 1111",
                ],
            },
        ],
    }
    # Human-readable companion doc rendered from the same checklist values.
    markdown = f"""# Chain 138 -> Wemix 1111 Bridge Completion Checklist
**Last Updated:** 2026-04-18
**Purpose:** Strict operator checklist to move WEMIX `1111` from repo-deferred state into a live `138 -> 1111` bridge path for `WETH9` / `WETH10`, then prepare GRU v2 `cW*` assets for rollout.
## 1. Current Repo State
| Item | Current State |
|---|---|
| Destination chain | `1111` / Wemix |
| Destination selector | `{selector_1111}` |
| Activation state | `{checklist["current_repo_state"]["activation_state"]}` |
| Bridge available | `{str(checklist["current_repo_state"]["bridge_available"]).lower()}` |
| Recorded `cW*` assets | `{", ".join(checklist["current_repo_state"]["cw_tokens"].keys()) or "none"}` |
| Recorded anchor addresses | `{", ".join(f"{k}={v}" for k, v in checklist["current_repo_state"]["anchor_addresses"].items())}` |
The repo currently shows WEMIX as **deferred**, not live. Native WEMIX gas is the only part that is already in good shape; the missing bridge deployment and LINK funding are the actual blockers.
## 2. Exact Environment Surface
| Variable | Current Value |
|---|---|
| `CCIP_ROUTER_WEMIX` | `{env_snapshot["CCIP_ROUTER_WEMIX"] or "(unset)"}` |
| `LINK_TOKEN_WEMIX` | `{env_snapshot["LINK_TOKEN_WEMIX"] or "(unset)"}` |
| `WETH9_WEMIX` | `{env_snapshot["WETH9_WEMIX"] or "(unset)"}` |
| `WETH10_WEMIX` | `{env_snapshot["WETH10_WEMIX"] or "(unset)"}` |
| `CCIPWETH9_BRIDGE_WEMIX` | `{env_snapshot["CCIPWETH9_BRIDGE_WEMIX"] or "(unset)"}` |
| `CCIPWETH10_BRIDGE_WEMIX` | `{env_snapshot["CCIPWETH10_BRIDGE_WEMIX"] or "(unset)"}` |
| `LINK_GAS_AVAILABLE_1111` | `{env_snapshot["LINK_GAS_AVAILABLE_1111"] or "(unset)"}` |
| `LINK_GAS_UNAVAILABLE_CHAIN_IDS` | `{env_snapshot["LINK_GAS_UNAVAILABLE_CHAIN_IDS"] or "(unset)"}` |
| `WEMIX_RPC` | `{env_snapshot["WEMIX_RPC"] or "(unset)"}` |
| `CHAIN138_SELECTOR` | `{env_snapshot["CHAIN138_SELECTOR"] or "(unset)"}` |
| `CCIPWETH9_BRIDGE_CHAIN138` | `{env_snapshot["CCIPWETH9_BRIDGE_CHAIN138"] or "(unset)"}` |
| `CCIPWETH10_BRIDGE_CHAIN138` | `{env_snapshot["CCIPWETH10_BRIDGE_CHAIN138"] or "(unset)"}` |
## 3. Completion Steps
### Step 1: Preflight WEMIX
```bash
cd smom-dbis-138
./scripts/deployment/preflight-config-ready-chains.sh wemix
```
Success means:
- `WEMIX_RPC` responds
- deployer has native WEMIX gas
- `CCIP_ROUTER_WEMIX`, `LINK_TOKEN_WEMIX`, `WETH9_WEMIX`, and `WETH10_WEMIX` are present
### Step 2: Deploy `CCIPWETH9Bridge` and `CCIPWETH10Bridge` on 1111
```bash
cd smom-dbis-138
./scripts/deployment/deploy-bridges-config-ready-chains.sh wemix
```
After deployment, persist the returned addresses into:
- `CCIPWETH9_BRIDGE_WEMIX`
- `CCIPWETH10_BRIDGE_WEMIX`
### Step 3: Wire destinations in both directions
```bash
cd smom-dbis-138
./scripts/deployment/complete-config-ready-chains.sh
```
Required result:
- Chain `138` WETH9 bridge includes selector `{selector_1111}` -> `CCIPWETH9_BRIDGE_WEMIX`
- Chain `138` WETH10 bridge includes selector `{selector_1111}` -> `CCIPWETH10_BRIDGE_WEMIX`
- WEMIX WETH9 bridge includes `CHAIN138_SELECTOR` -> `CCIPWETH9_BRIDGE_CHAIN138`
- WEMIX WETH10 bridge includes `CHAIN138_SELECTOR` -> `CCIPWETH10_BRIDGE_CHAIN138`
### Step 4: Fund LINK on the WEMIX bridges
```bash
cd smom-dbis-138
./scripts/deployment/check-link-balance-config-ready-chains.sh wemix
./scripts/deployment/fund-ccip-bridges-with-link.sh
```
Target:
- at least `10 LINK` on each bridge
- `LINK_GAS_AVAILABLE_1111` becomes non-zero
### Step 5: Verify bridge readiness
```bash
cast call "$CCIPWETH9_BRIDGE_WEMIX" 'getDestinationChains()(uint64[])' --rpc-url "$WEMIX_RPC"
cast call "$CCIPWETH10_BRIDGE_WEMIX" 'getDestinationChains()(uint64[])' --rpc-url "$WEMIX_RPC"
```
Then run a small test transfer for `WETH9` or `WETH10` from `138 -> 1111` and confirm receipt.
### Step 6: Prepare GRU v2 `cW*` assets for 1111
This step is **after** the WETH bridge path is proven live.
Current repo state only scaffolds:
- `cWBTC`
Before GRU v2 can be promoted on `1111`, the operator still needs to:
- confirm the intended destination `cW*` asset set
- deploy or confirm destination token contracts on `1111`
- wire bridge or mint authority for those assets
- only then enable DODO PMM / Uniswap V2 / other venue rollout on WEMIX
## 4. Promotion Conditions
WEMIX `1111` can move out of deferred state only after all of the following are true:
1. `CCIPWETH9_BRIDGE_WEMIX` and `CCIPWETH10_BRIDGE_WEMIX` are deployed and stored in `.env`.
2. Chain `138` and chain `1111` each list the other as a valid bridge destination.
3. Both WEMIX bridges hold enough LINK for operational CCIP fees.
4. A small bridge test succeeds in both configuration and receipt.
5. The GRU v2 asset rollout plan for `1111` is explicitly wired, not just implied.
"""
    # Write both artifacts, creating parent directories on first run.
    JSON_PATH.parent.mkdir(parents=True, exist_ok=True)
    DOC_PATH.parent.mkdir(parents=True, exist_ok=True)
    JSON_PATH.write_text(json.dumps(checklist, indent=2) + "\n")
    DOC_PATH.write_text(markdown)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python3
from pathlib import Path
import json, time
# Repository-anchored directories for inputs (reports/config) and doc output.
ROOT = Path(__file__).resolve().parents[2]
REPORTS = ROOT / "reports" / "extraction"
DOCS = ROOT / "docs" / "03-deployment"
CONFIG = ROOT / "config" / "extraction"
def load(p):
    """Parse and return the JSON document at path p."""
    return json.loads(p.read_text())
def write(p, data):
    """Serialize data as indented JSON (plus trailing newline) to p, creating parents."""
    p.parent.mkdir(parents=True, exist_ok=True)
    p.write_text(json.dumps(data, indent=2) + "\n")
def write_text(p, text):
    """Write text to p with exactly one trailing newline, creating parents."""
    p.parent.mkdir(parents=True, exist_ok=True)
    p.write_text(text.rstrip() + "\n")
def now():
    """Current UTC time formatted as an ISO-8601 'Z' timestamp."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
# --- Inputs: latest extraction reports plus the user-supplied inventory ---
review = load(REPORTS / 'immediate-and-same-day-corridor-assets-latest.json')
plan = load(REPORTS / 'source-to-cex-execution-plan-latest.json')
ready = load(REPORTS / 'source-to-cex-production-readiness-latest.json')
validation_path = REPORTS / 'source-to-cex-offchain-sink-validation-latest.json'
# Off-chain sink validation is optional; None when that report does not exist yet.
validation = load(validation_path) if validation_path.exists() else None
inv = load(CONFIG / 'additional-wallet-inventory.json')
# Aggregated machine-readable baseline combining the report views above.
summary = {
    'generated_at': now(),
    'wallet': '0x4A666F96fC8764181194447A7dFdb7d471b301C8',
    'mainnet_funding_posture': plan.get('mainnet_funding_posture'),
    'scope': {
        'included_wallets': ['0x4A666F96fC8764181194447A7dFdb7d471b301C8'],
        'additional_wallet_inventory': 'config/extraction/additional-wallet-inventory.json',
        'additional_wallet_inventory_mode': 'user-supplied',
        'source_to_cex_policy': 'config/extraction/source-to-cex-production-policy.json',
        'source_to_cex_production_enabled': False
    },
    'net_worth_views_usd': {
        'near_immediate_mainnet_cw_exit': review['bucket_subtotals_usd']['immediate'],
        'same_day_corridor_total': review['bucket_subtotals_usd']['same_day_corridor']
    },
    'additional_inventory': {
        'inventory_mode': 'user-supplied',
        'production_sinks': inv.get('offchain_accounts', []),
        'offchain_count': len(inv.get('offchain_accounts', [])),
        'offchain_sink_validation': validation
    },
    'source_artifacts': {
        'immediate_and_same_day_corridor_assets': 'reports/extraction/immediate-and-same-day-corridor-assets-latest.json',
        'source_to_cex_execution_plan': 'reports/extraction/source-to-cex-execution-plan-latest.json',
        'source_to_cex_production_readiness': 'reports/extraction/source-to-cex-production-readiness-latest.json',
        'source_to_cex_offchain_sink_validation': 'reports/extraction/source-to-cex-offchain-sink-validation-latest.json',
        'source_to_cex_production_policy': 'config/extraction/source-to-cex-production-policy.json',
        'additional_wallet_inventory': 'config/extraction/additional-wallet-inventory.json'
    }
}
write(REPORTS / 'comprehensive-capital-baseline-latest.json', summary)
# --- Human-readable Markdown companion rendered from the same summary ---
lines = ['# Comprehensive Capital Baseline','',f"- Generated: `{summary['generated_at']}`",f"- Additional inventory: `{summary['scope']['additional_wallet_inventory']}`",f"- Source-to-CEX policy: `{summary['scope']['source_to_cex_policy']}`",f"- Production enabled: `{summary['scope']['source_to_cex_production_enabled']}`"]
if summary.get('mainnet_funding_posture'):
    lines.append(f"- Mainnet funding posture: `{summary['mainnet_funding_posture']['mode']}` via `{', '.join(summary['mainnet_funding_posture']['required_deployer_assets'])}`")
lines += ['', '## Snapshot','',f"- Immediate bucket USD: `{summary['net_worth_views_usd']['near_immediate_mainnet_cw_exit']}`",f"- Same-day corridor USD: `{summary['net_worth_views_usd']['same_day_corridor_total']}`"]
if validation:
    lines += ['', '## Off-Chain Sink Validation', '', f"- Validation ready: `{validation['ready']}`", f"- Included sink count: `{validation['included_sink_count']}`"]
for warning in (validation or {}).get('warnings', []):
    lines.append(f"- Warning: {warning}")
lines += ['', '## Source Artifacts', '']
for k,v in summary['source_artifacts'].items(): lines.append(f'- `{k}`: `{v}`')
write_text(DOCS / 'COMPREHENSIVE_CAPITAL_BASELINE.md', '\n'.join(lines))
# Print the JSON artifact path so callers can chain on it.
print(REPORTS / 'comprehensive-capital-baseline-latest.json')

View File

@@ -0,0 +1,270 @@
#!/usr/bin/env python3
"""
Machine-readable dump of cW* tokens vs USD-like PMM quotes from deployment-status.json.
Reads cross-chain-pmm-lps/config/deployment-status.json, finds pools where base == symbol
and quote is USDC/USDT/cWUSDC/cWUSDT, then calls getMidPrice and getVaultReserve on-chain.
Usage:
python3 scripts/lib/dump_cw_usd_quotes.py [--output PATH]
Requires: cast (foundry), RPC URLs in smom-dbis-138/.env (or env already exported).
"""
from __future__ import annotations
import argparse
import json
import os
import subprocess
import sys
from datetime import datetime, timezone
from decimal import Decimal
from pathlib import Path
# Repository-anchored defaults for inputs and the JSON dump output.
ROOT = Path(__file__).resolve().parents[2]
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
DEFAULT_ENV = ROOT / "smom-dbis-138" / ".env"
DEFAULT_OUT = ROOT / "output" / "cw-assets-usd-quote-dump.json"
# Chain id (as string key of deployment-status "chains") -> env var holding
# that chain's RPC URL.
CHAIN_RPC_ENV = {
    "1": "ETHEREUM_MAINNET_RPC",
    "10": "OPTIMISM_MAINNET_RPC",
    "25": "CRONOS_RPC",
    "56": "BSC_MAINNET_RPC",
    "100": "GNOSIS_MAINNET_RPC",
    "137": "POLYGON_MAINNET_RPC",
    "42161": "ARBITRUM_MAINNET_RPC",
    "42220": "CELO_MAINNET_RPC",
    "43114": "AVALANCHE_MAINNET_RPC",
    "8453": "BASE_MAINNET_RPC",
    "138": "RPC_URL_138",
}
# Quote-leg symbols treated as USD-denominated.
USD_LIKE = frozenset({"USDC", "USDT", "cWUSDC", "cWUSDT"})
def load_dotenv(path: Path) -> dict[str, str]:
    """Parse KEY=VALUE pairs from a dotenv file; returns {} if the file is absent."""
    result: dict[str, str] = {}
    if not path.is_file():
        return result
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        # Ignore blanks, comments, and malformed lines.
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        key, _, value = entry.partition("=")
        result[key] = value
    return result
def cast(env: dict[str, str], rpc: str, args: list[str], timeout: float = 12.0) -> tuple[str, int]:
    """Run `cast <args> --rpc-url <rpc>` and return (stripped stdout, returncode).

    Any execution failure (missing binary, timeout, etc.) is reported as
    (error message, -1) instead of raising.
    """
    cmd = ["cast", *args, "--rpc-url", rpc]
    merged_env = {**os.environ, **env}
    try:
        proc = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=timeout,
            env=merged_env,
        )
    except Exception as e:
        return str(e), -1
    return (proc.stdout or "").strip(), proc.returncode
def parse_u256(s: str) -> int | None:
if not s:
return None
t = s.split()[0].strip()
if "[" in t:
t = t.split("[")[0]
if t.startswith("0x"):
return int(t, 16)
return int(float(t)) if "e" in t.lower() else int(t)
def find_usd_pool(ch: dict, sym: str) -> tuple[str | None, str | None]:
    """Return (poolAddress, quoteSymbol) for the first pool quoting sym in a USD-like leg.

    Scans "pmmPools" before "pmmPoolsVolatile"; returns (None, None) when no
    matching pool exists.
    """
    for source in ("pmmPools", "pmmPoolsVolatile"):
        for pool in ch.get(source) or []:
            if pool.get("base") != sym:
                continue
            quote = pool.get("quote") or ""
            if quote in USD_LIKE:
                return pool.get("poolAddress"), quote
    return None, None
def main() -> int:
    """Dump cW*/USD PMM quotes from deployment-status pools to JSON; returns exit status."""
    ap = argparse.ArgumentParser()
    ap.add_argument(
        "--output",
        "-o",
        type=Path,
        default=DEFAULT_OUT,
        help=f"JSON output path (default: {DEFAULT_OUT})",
    )
    ap.add_argument(
        "--env-file",
        type=Path,
        default=DEFAULT_ENV,
        help="Dotenv with RPC URLs",
    )
    ap.add_argument(
        "--deployment-status",
        type=Path,
        default=DEPLOYMENT_STATUS,
    )
    args = ap.parse_args()
    # NOTE: dotenv values take precedence over the process environment here.
    env = {**os.environ, **load_dotenv(args.env_file)}
    ds = json.loads(args.deployment_status.read_text())
    # decimals() cache keyed by (rpc prefix, lowercased token address).
    dec_cache: dict[tuple[str, str], int] = {}

    def decimals(addr: str, rpc: str) -> int:
        # ERC-20 decimals() lookup with an 18 fallback on any RPC/parse failure.
        key = (rpc[:32], addr.lower())
        if key in dec_cache:
            return dec_cache[key]
        out, code = cast(env, rpc, ["call", addr, "decimals()(uint8)"])
        d = int(parse_u256(out)) if code == 0 and out else 18
        dec_cache[key] = d
        return d

    entries: list[dict] = []
    gas_mirrors: list[dict] = []
    # Walk chains in numeric order for a deterministic dump.
    for cid, ch in sorted(ds.get("chains", {}).items(), key=lambda x: int(x[0])):
        rpc_key = CHAIN_RPC_ENV.get(cid)
        rpc = (env.get(rpc_key) or "").strip() if rpc_key else ""
        net = ch.get("name", cid)
        # Collect cW* gas-mirror addresses regardless of RPC availability.
        gm = ch.get("gasMirrors") or {}
        for sym, addr in gm.items():
            if sym.startswith("cW"):
                gas_mirrors.append(
                    {
                        "chain_id": int(cid),
                        "network": net,
                        "symbol": sym,
                        "token_address": addr,
                    }
                )
        # No usable RPC: record each cW* token with an explicit error row.
        if not rpc_key or not rpc:
            for sym, addr in (ch.get("cwTokens") or {}).items():
                if not sym.startswith("cW"):
                    continue
                entries.append(
                    {
                        "chain_id": int(cid),
                        "network": net,
                        "symbol": sym,
                        "token_address": addr,
                        "rpc_env": rpc_key,
                        "error": "rpc_env_missing_or_empty",
                    }
                )
            continue
        for sym, addr in (ch.get("cwTokens") or {}).items():
            if not sym.startswith("cW"):
                continue
            pool, qlab = find_usd_pool(ch, sym)
            row: dict = {
                "chain_id": int(cid),
                "network": net,
                "symbol": sym,
                "token_address": addr,
                "rpc_env": rpc_key,
                "quote_leg": qlab,
                "pool_address": pool,
            }
            if not pool:
                row["error"] = "no_usd_quoted_pool_in_deployment_status"
                entries.append(row)
                continue
            # Sanity: the pool address must actually hold bytecode.
            code, cok = cast(env, rpc, ["code", pool])
            if cok != 0 or not code or code == "0x":
                row["error"] = "pool_no_bytecode"
                entries.append(row)
                continue
            # DVM-style pools expose _BASE_TOKEN_/_QUOTE_TOKEN_ getters.
            bout, bok = cast(env, rpc, ["call", pool, "_BASE_TOKEN_()(address)"])
            qout, qok = cast(env, rpc, ["call", pool, "_QUOTE_TOKEN_()(address)"])
            if bok != 0 or qok != 0:
                row["error"] = "not_dvm_abi"
                entries.append(row)
                continue
            base_a = bout.split()[0]
            quote_a = qout.split()[0]
            row["base_token_address"] = base_a
            row["quote_token_address"] = quote_a
            bd = decimals(base_a, rpc)
            qd = decimals(quote_a, rpc)
            row["base_decimals"] = bd
            row["quote_decimals"] = qd
            # Vault-implied price = quote reserve / base reserve, both humanized.
            rv, rcode = cast(env, rpc, ["call", pool, "getVaultReserve()(uint256,uint256)"])
            vault_ratio: str | None = None
            if rcode == 0 and rv:
                nums: list[int] = []
                for p in rv.replace(",", " ").split():
                    p = p.strip()
                    if p and (p[0].isdigit() or p.startswith("0x")):
                        try:
                            nums.append(parse_u256(p))
                        except Exception:
                            pass
                if len(nums) >= 2:
                    br, qr = nums[0], nums[1]
                    bh = Decimal(br) / Decimal(10**bd)
                    qh = Decimal(qr) / Decimal(10**qd)
                    if bh > 0:
                        vault_ratio = str((qh / bh).quantize(Decimal("1." + "0" * 18)))
            # Mid price as reported by the pool; also scaled by 1e18 for display.
            mp, mok = cast(env, rpc, ["call", pool, "getMidPrice()(uint256)"])
            mid_raw: str | None = None
            mid_over_1e18: str | None = None
            if mok == 0 and mp:
                try:
                    m = parse_u256(mp)
                    mid_raw = str(m)
                    mid_over_1e18 = str((Decimal(m) / Decimal(10**18)).quantize(Decimal("1." + "0" * 18)))
                except Exception as e:
                    row["mid_price_error"] = str(e)
            if vault_ratio is not None:
                row["vault_implied_quote_per_base"] = vault_ratio
            if mid_raw is not None:
                row["mid_price_raw_uint"] = mid_raw
            if mid_over_1e18 is not None:
                row["mid_price_over_1e18"] = mid_over_1e18
            if vault_ratio is None and mid_over_1e18 is None:
                row["error"] = "mid_and_vault_unavailable"
            entries.append(row)
    payload = {
        "schema_version": 1,
        "generated_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
        "source_file": str(args.deployment_status.relative_to(ROOT)),
        "description": (
            "cW* cwTokens: PMM mid (getMidPrice) and vault-implied ratio vs USDC/USDT/cWUSDC/cWUSDT "
            "from deployment-status pools. mid_price_over_1e18 is quote per base in human scale when "
            "DODO uses 18-decimal mid; vault_implied_quote_per_base is quote_reserve/base_reserve."
        ),
        "gas_mirrors": gas_mirrors,
        "entries": sorted(entries, key=lambda r: (r["symbol"], r["chain_id"])),
    }
    args.output.parent.mkdir(parents=True, exist_ok=True)
    args.output.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8")
    # Path goes to stderr so stdout stays clean for pipelines.
    print(str(args.output), file=sys.stderr)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,59 @@
"""Shared helpers for Elemental Imperium 33×33×6 wallet matrix labeling."""
from __future__ import annotations
def linear_index(lpbca: int, branch: int, class_: int) -> int:
    """Flatten (lpbca, branch, class) into a 0-based index over the 33x33x6 grid."""
    return (lpbca * 33 + branch) * 6 + class_
def cell_id(lpbca: int, branch: int, class_: int) -> str:
    """Immutable coordinate label, e.g. EI-L03-B12-C4 (lpbca/branch zero-padded)."""
    return "EI-L{:02d}-B{:02d}-C{}".format(lpbca, branch, class_)
def build_label(network_code: str, cid: str, asn: int | None) -> str:
base = f"{network_code}.{cid}"
if asn is not None:
return f"{base}.AS{asn}"
return base
def match_range_rule(wallet: dict, match: dict) -> bool:
    """True when the wallet's lpbca/branch/class all sit inside the rule's ranges.

    Each key present in `match` maps to an inclusive [min, max] pair; absent
    keys match everything, so an empty match {} matches every wallet.
    Raises ValueError when a present bound is not a two-element list.
    """
    for dim in ("lpbca", "branch", "class"):
        if dim not in match:
            continue
        bounds = match[dim]
        if not isinstance(bounds, list) or len(bounds) != 2:
            raise ValueError(f"match.{dim} must be [min, max]")
        low, high = int(bounds[0]), int(bounds[1])
        if not low <= int(wallet[dim]) <= high:
            return False
    return True
def resolve_network_asn(
wallet: dict,
cid: str,
base_network: str,
base_asn: int | None,
overlay: dict | None,
) -> tuple[str, int | None]:
"""Apply rangeRules (first match wins), then cellOverrides."""
net, asn = base_network, base_asn
if overlay:
for rule in overlay.get("rangeRules", []):
if match_range_rule(wallet, rule.get("match") or {}):
net = rule["networkCode"]
if "asn" in rule:
asn = rule["asn"]
break
ovr = overlay.get("cellOverrides", {}).get(cid)
if isinstance(ovr, dict):
if "networkCode" in ovr:
net = ovr["networkCode"]
if "asn" in ovr:
asn = ovr["asn"]
return net, asn

View File

@@ -0,0 +1,211 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import os
import subprocess
import sys
from pathlib import Path
# Repo root (two directory levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Source-of-truth deployment registry and the wave-2 operator sequence input.
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
WAVE2 = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json"
# Per-pool raw amounts in token base units: each pool gets ASSET_RAW[asset]
# of the asset against STABLE_RAW of the stable leg.
STABLE_RAW = 1_000_000_000
ASSET_RAW = {
    "cWCADC": 1_350_000_000,
    "cWCHFC": 850_000_000,
    "cWEURT": 900_000_000,
    "cWGBPT": 750_000_000,
    "cWJPYC": 150_000_000_000,
    "cWXAUC": 300_000,
    "cWXAUT": 300_000,
    "cWAUSDT": 1_500_000_000,
    "cWUSDW": 1_000_000_000,
}
# Assets intentionally excluded from this seeding wave.
SKIP_ASSETS = {"cWBTC"}
# Unlimited ERC-20 approval value (2**256 - 1) as a decimal string for cast.
MAX_UINT = str(2**256 - 1)
# Candidate RPC env-var names per chain id, tried in order by resolve_rpc().
RPC_ENV_KEYS = {
    1: ["ALL_MAINNET_RPC", "ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_MAINNET_RPC", "OPTIMISM_RPC_URL"],
    25: ["CRONOS_MAINNET_RPC", "CRONOS_RPC_URL", "CRONOS_RPC"],
    56: ["BSC_MAINNET_RPC", "BSC_RPC_URL"],
    100: ["GNOSIS_MAINNET_RPC", "GNOSIS_RPC_URL", "GNOSIS_RPC"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    8453: ["BASE_MAINNET_RPC", "BASE_RPC_URL"],
    42161: ["ARBITRUM_MAINNET_RPC", "ARBITRUM_RPC_URL"],
    42220: ["CELO_MAINNET_RPC", "CELO_RPC_URL", "CELO_RPC"],
    43114: ["AVALANCHE_MAINNET_RPC", "AVALANCHE_RPC_URL"],
}
def load_json(path: Path):
    """Read and parse the JSON document at ``path``."""
    text = path.read_text()
    return json.loads(text)
def run(cmd: list[str], check: bool = True) -> str:
    """Execute ``cmd``, echoing the command and its output; return stripped stdout.

    stdout/stderr are mirrored to this process's streams. With ``check`` a
    non-zero exit raises RuntimeError instead of returning.
    """
    print("+", " ".join(cmd), flush=True)
    proc = subprocess.run(cmd, text=True, capture_output=True)
    for stream, target in ((proc.stdout, sys.stdout), (proc.stderr, sys.stderr)):
        if stream:
            print(stream.strip(), file=target, flush=True)
    if check and proc.returncode != 0:
        raise RuntimeError(f"command failed: {' '.join(cmd)}")
    return proc.stdout.strip()
def cast_call(rpc: str, to: str, sig: str, *args: str) -> str:
    # Read-only eth_call via Foundry's `cast call`; returns decoded stdout.
    return run(["cast", "call", to, sig, *args, "--rpc-url", rpc])
def cast_send(rpc: str, pk: str, to: str, sig: str, *args: str) -> str:
    # State-changing transaction via `cast send`, signed with pk; synchronous
    # (no --async), so failures surface through run()'s RuntimeError.
    return run(["cast", "send", to, sig, *args, "--rpc-url", rpc, "--private-key", pk])
def parse_uint(value: str) -> int:
    """Parse the leading integer from cast output.

    `cast` may append a human-readable hint such as ``1005375 [1.005e6]``;
    only the first whitespace-delimited token is parsed.
    """
    first_token = value.split(None, 1)[0]
    return int(first_token)
def resolve_rpc(chain_id: int) -> str:
    """Return the first non-empty RPC URL among the env keys for chain_id.

    Raises RuntimeError when none of the candidate variables is set.
    """
    for key in RPC_ENV_KEYS.get(chain_id, []):
        value = os.environ.get(key)
        if value:
            return value
    raise RuntimeError(f"missing RPC env for chain {chain_id}")
def parse_chain_list(value: str | None) -> set[int]:
if not value:
return set()
result: set[int] = set()
for part in value.split(","):
part = part.strip()
if not part:
continue
result.add(int(part))
return result
def chain_status(chain_id: int, status: dict) -> dict:
    """Look up the per-chain section of deployment-status (keys are strings)."""
    chains = status["chains"]
    return chains[str(chain_id)]
def token_addr(chain: dict, symbol: str) -> str:
    """Return the deployed address for ``symbol`` from ``chain["cwTokens"]``.

    Entries are either a bare address string or a dict carrying the address
    under ``"address"`` (preferred) or ``"token"``.

    Raises:
        RuntimeError: when a dict entry carries no usable address. Previously
            this returned None silently, which only failed later inside a
            cast invocation with a confusing error.
    """
    value = chain["cwTokens"][symbol]
    if isinstance(value, dict):
        address = value.get("address") or value.get("token")
        if not address:
            raise RuntimeError(f"no address recorded for token {symbol}")
        return address
    return value
def has_code(rpc: str, address: str) -> bool:
    # Treat empty/"0x" bytecode as "not deployed"; check=False so an RPC
    # failure reads as no-code instead of aborting the whole run.
    code = run(["cast", "code", address, "--rpc-url", rpc], check=False).strip()
    return code not in ("", "0x")
def balance_of(rpc: str, token: str, owner: str) -> int:
    # ERC-20 balanceOf via read-only eth_call.
    return parse_uint(cast_call(rpc, token, "balanceOf(address)(uint256)", owner))
def allowance_of(rpc: str, token: str, owner: str, spender: str) -> int:
    # ERC-20 allowance(owner, spender) via read-only eth_call.
    return parse_uint(cast_call(rpc, token, "allowance(address,address)(uint256)", owner, spender))
def ensure_mint_balance(rpc: str, pk: str, token: str, owner: str, needed: int) -> None:
    # Top up owner's balance by minting only the shortfall.
    # NOTE(review): presumably pk controls a minter on these cW tokens — the
    # mint call will revert otherwise; confirm against the token contracts.
    current = balance_of(rpc, token, owner)
    if current >= needed:
        return
    shortfall = needed - current
    cast_send(rpc, pk, token, "mint(address,uint256)", owner, str(shortfall))
def ensure_approval(rpc: str, pk: str, token: str, owner: str, router: str, needed: int) -> None:
    # When the current allowance is insufficient, approve MAX_UINT once so
    # later addLiquidity calls don't re-approve per pool.
    current = allowance_of(rpc, token, owner, router)
    if current >= needed:
        return
    cast_send(rpc, pk, token, "approve(address,uint256)(bool)", router, MAX_UINT)
def pair_for(rpc: str, factory: str, a: str, b: str) -> str:
    # UniswapV2Factory.getPair — the zero address means the pair does not exist.
    return cast_call(rpc, factory, "getPair(address,address)(address)", a, b)
def ensure_pair(rpc: str, pk: str, factory: str, a: str, b: str) -> None:
    # Create the pair only when getPair returns the zero address; otherwise
    # the existing pair is reused.
    pair = pair_for(rpc, factory, a, b)
    if pair.lower() != "0x0000000000000000000000000000000000000000":
        return
    cast_send(rpc, pk, factory, "createPair(address,address)(address)", a, b)
def add_liquidity(rpc: str, pk: str, router: str, a: str, b: str, amt_a: int, amt_b: int, signer: str) -> None:
    """Seed amt_a/amt_b of tokens a/b via UniswapV2Router02.addLiquidity.

    Min amounts equal the desired amounts (zero slippage tolerance), which
    is acceptable here because the caller seeds pools it has just created.
    The deadline is one hour from now.
    """
    # Fix: use time.time() for epoch seconds instead of shelling out to
    # `date +%s` through run(), which spawned a subprocess per call and
    # depended on the external `date` binary.
    import time

    deadline = str(int(time.time()) + 3600)
    cast_send(
        rpc,
        pk,
        router,
        "addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)",
        a,
        b,
        str(amt_a),
        str(amt_b),
        str(amt_a),
        str(amt_b),
        signer,
        deadline,
    )
def main() -> None:
    """Seed Uniswap V2 wrapped-mesh pools on every wave-2 chain.

    For each chain in the wave-2 operator sequence: pre-fund and approve the
    two stable legs, then for each remaining wrapped asset ensure balance,
    approval, pair existence, and add liquidity against both cWUSDC and
    cWUSDT. Chains are processed best-effort: a RuntimeError marks the chain
    BLOCKED and the loop continues with the next chain.
    """
    pk = os.environ.get("PRIVATE_KEY")
    if not pk:
        raise RuntimeError("PRIVATE_KEY missing")
    signer = run(["cast", "wallet", "address", "--private-key", pk])
    status = load_json(DEPLOYMENT_STATUS)
    wave2 = load_json(WAVE2)
    # Optional filters: SKIP_CHAINS="1,56" and START_CHAIN=137 for resuming.
    skip_chains = parse_chain_list(os.environ.get("SKIP_CHAINS"))
    start_chain = int(os.environ.get("START_CHAIN", "0"))
    for entry in wave2["entries"]:
        chain_id = entry["chain_id"]
        if chain_id < start_chain or chain_id in skip_chains:
            print(f"SKIP CHAIN {chain_id}: filtered", flush=True)
            continue
        rpc = resolve_rpc(chain_id)
        chain = chain_status(chain_id, status)
        # Factory/router env vars are mandatory; a missing key aborts hard
        # (KeyError) rather than being treated as a per-chain blocker.
        factory = os.environ[f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY"]
        router = os.environ[f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER"]
        print(f"\n=== CHAIN {chain_id} {entry['network']} ===", flush=True)
        try:
            active_assets = [a for a in entry["remaining_wrapped_mesh_assets_after_wave1"] if a not in SKIP_ASSETS]
            # Each asset consumes STABLE_RAW of each stable, so pre-fund both
            # stable legs for the whole chain up front.
            needed_stable = len(active_assets) * STABLE_RAW
            cwusdc = token_addr(chain, "cWUSDC")
            cwusdt = token_addr(chain, "cWUSDT")
            ensure_mint_balance(rpc, pk, cwusdc, signer, needed_stable)
            ensure_mint_balance(rpc, pk, cwusdt, signer, needed_stable)
            ensure_approval(rpc, pk, cwusdc, signer, router, needed_stable)
            ensure_approval(rpc, pk, cwusdt, signer, router, needed_stable)
            for asset in active_assets:
                if asset not in ASSET_RAW:
                    raise RuntimeError(f"no amount rule for {asset}")
                addr = token_addr(chain, asset)
                if not has_code(rpc, addr):
                    print(f"SKIP {chain_id} {asset}: no code", flush=True)
                    continue
                # The asset side is needed twice: once per stable pool.
                needed_asset = ASSET_RAW[asset] * 2
                ensure_mint_balance(rpc, pk, addr, signer, needed_asset)
                ensure_approval(rpc, pk, addr, signer, router, needed_asset)
                for stable_symbol, stable_addr in [("cWUSDC", cwusdc), ("cWUSDT", cwusdt)]:
                    ensure_pair(rpc, pk, factory, addr, stable_addr)
                    add_liquidity(rpc, pk, router, addr, stable_addr, ASSET_RAW[asset], STABLE_RAW, signer)
        except RuntimeError as exc:
            print(f"BLOCKED CHAIN {chain_id}: {exc}", flush=True)
            continue
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,318 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from pathlib import Path
# Repo root (two directory levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Inputs: deployment registry and the phase-2 wave-1 completion report.
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
PHASE2_WAVE1 = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave1-completion-status-latest.json"
# Outputs: machine-readable JSON report and the human-readable markdown doc.
REPORT = ROOT / "reports" / "extraction" / "promod-gru-v2-full-mesh-gap-report-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_GRU_V2_FULL_MESH_GAP_REPORT.md"
# The ten public networks covered by the gap report.
TARGET_CHAIN_IDS = [1, 10, 25, 56, 100, 137, 8453, 42161, 42220, 43114]
# Core stable rails excluded from "remaining mesh" accounting.
CORE_ASSETS = {"cWUSDC", "cWUSDT"}
def now() -> str:
    """Current UTC time as an ISO-8601 ``Z``-suffixed timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load_json(path: Path):
    """Parse the JSON document stored at ``path``."""
    raw = path.read_text()
    return json.loads(raw)
def write_json(path: Path, payload) -> None:
    """Serialize ``payload`` as indented JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str) -> None:
    """Write ``text`` normalized to exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def chain_entry(status: dict, chain_id: int) -> dict:
    """Per-chain section of deployment-status (keys are decimal strings)."""
    chains = status["chains"]
    return chains[str(chain_id)]
def cw_assets(chain: dict) -> list[str]:
    """Sorted ``cW*``-prefixed token symbols documented for this chain."""
    documented = chain.get("cwTokens", {})
    return sorted(symbol for symbol in documented if symbol.startswith("cW"))
def pmm_live_pairs(chain: dict) -> list[str]:
    """Sorted unique ``base/quote`` strings for PMM pools with a cW base."""
    seen: set = set()
    for pool in chain.get("pmmPools", []):
        base = pool.get("base")
        if isinstance(base, str) and base.startswith("cW"):
            seen.add(f"{base}/{pool.get('quote')}")
    return sorted(seen)
def pmm_live_assets(chain: dict) -> list[str]:
    """Sorted unique cW base assets across the chain's PMM pools."""
    found: set = set()
    for pool in chain.get("pmmPools", []):
        base = pool.get("base")
        if isinstance(base, str) and base.startswith("cW"):
            found.add(base)
    return sorted(found)
def uniswap_v2_core_pairs(chain: dict) -> list[str]:
    """Sorted unique ``base/quote`` strings for recorded Uniswap V2 pools."""
    combos: set = set()
    for pool in chain.get("uniswapV2Pools", []):
        base, quote = pool.get("base"), pool.get("quote")
        if isinstance(base, str) and isinstance(quote, str):
            combos.add(f"{base}/{quote}")
    return sorted(combos)
def phase2_wave1_map(payload: dict) -> dict[int, list[dict]]:
    """Index the wave-1 completion payload's chain entries by chain_id."""
    return {
        entry["chain_id"]: entry.get("wave1_pairs", [])
        for entry in payload.get("chains", [])
    }
def wave1_pairs_for_chain(by_chain: dict[int, list[dict]], chain_id: int) -> list[str]:
    """Sorted unique wave-1 pair names recorded for ``chain_id``."""
    rows = by_chain.get(chain_id, [])
    return sorted({row["pair"] for row in rows})
def wave1_assets_for_chain(by_chain: dict[int, list[dict]], chain_id: int) -> list[str]:
    """Sorted unique cW assets on either side of a chain's wave-1 pairs."""
    assets: set = set()
    for row in by_chain.get(chain_id, []):
        left, right = row["pair"].split("/")
        for side in (left, right):
            if side.startswith("cW"):
                assets.add(side)
    return sorted(assets)
def ref_status(chain: dict, protocol: str) -> dict:
    """Summarize ``gasReferenceVenues`` rows for one protocol.

    Status is ``live`` if any row is live, else ``reference_only`` if any
    row is supported, else ``unsupported``. Flags are strict ``is True``
    checks so non-boolean values count as False.
    """
    rows = [row for row in chain.get("gasReferenceVenues", []) if row.get("protocol") == protocol]
    supported = any(row.get("supported") is True for row in rows)
    live = any(row.get("live") is True for row in rows)
    routing_visible = any(row.get("routingVisible") is True for row in rows)
    verdict = "live" if live else ("reference_only" if supported else "unsupported")
    return {
        "status": verdict,
        "supported": supported,
        "live": live,
        "routing_visible": routing_visible,
        "rows": rows,
    }
def remaining_mesh_assets(chain_assets: list[str], wave1_assets: list[str]) -> list[str]:
    """Chain assets not yet covered by the core rails or wave-1 deployments."""
    covered = CORE_ASSETS.union(wave1_assets)
    return sorted(asset for asset in chain_assets if asset not in covered)
def recommended_pairs(assets: list[str]) -> list[str]:
    """Propose ``asset/cWUSDC`` then ``asset/cWUSDT`` per asset, skipping self-pairs."""
    proposals: list[str] = []
    for asset in assets:
        for stable in ("cWUSDC", "cWUSDT"):
            if asset != stable:
                proposals.append(f"{asset}/{stable}")
    return proposals
def main() -> None:
    """Build the GRU v2 full-mesh gap report (JSON REPORT + markdown DOC).

    Joins deployment-status with the phase-2 wave-1 completion report across
    the ten target chains, summarizes venue status per chain, then writes
    the machine-readable payload and renders the markdown document from it.
    """
    deployment_status = load_json(DEPLOYMENT_STATUS)
    phase2_wave1 = load_json(PHASE2_WAVE1)
    wave1_by_chain = phase2_wave1_map(phase2_wave1)
    chain_rows = []
    # Running live-chain counters for the network summary.
    balancer_live = 0
    curve_live = 0
    uniswap_v3_live = 0
    for chain_id in TARGET_CHAIN_IDS:
        chain = chain_entry(deployment_status, chain_id)
        assets = cw_assets(chain)
        pmm_pairs = pmm_live_pairs(chain)
        pmm_assets = pmm_live_assets(chain)
        uv2_core_pairs = uniswap_v2_core_pairs(chain)
        uv2_wave1_pairs = wave1_pairs_for_chain(wave1_by_chain, chain_id)
        uv2_wave1_assets = wave1_assets_for_chain(wave1_by_chain, chain_id)
        remaining_assets = remaining_mesh_assets(assets, uv2_wave1_assets)
        balancer = ref_status(chain, "balancer")
        curve = ref_status(chain, "curve")
        uniswap_v3 = ref_status(chain, "uniswap_v3")
        oneinch = ref_status(chain, "1inch")
        if balancer["live"]:
            balancer_live += 1
        if curve["live"]:
            curve_live += 1
        if uniswap_v3["live"]:
            uniswap_v3_live += 1
        chain_rows.append(
            {
                "chain_id": chain_id,
                "network": chain["name"],
                "bridge_available": chain.get("bridgeAvailable", False),
                "cw_asset_count": len(assets),
                "cw_assets": assets,
                "venue_status": {
                    "dodo_pmm": {
                        "status": "live" if pmm_pairs else "not_recorded",
                        "live_pair_count": len(pmm_pairs),
                        "live_pairs": pmm_pairs,
                        "live_asset_count": len(pmm_assets),
                        "live_assets": pmm_assets,
                    },
                    "uniswap_v2": {
                        "status": "wave1_live" if uv2_wave1_pairs else ("core_only" if uv2_core_pairs else "not_recorded"),
                        "core_pair_count": len(uv2_core_pairs),
                        "core_pairs": uv2_core_pairs,
                        "wave1_pair_count": len(uv2_wave1_pairs),
                        "wave1_pairs": uv2_wave1_pairs,
                        "wave1_asset_count": len(uv2_wave1_assets),
                        "wave1_assets": uv2_wave1_assets,
                    },
                    "balancer": balancer,
                    "curve": curve,
                    "uniswap_v3": uniswap_v3,
                    "oneinch": oneinch,
                },
                "remaining_wrapped_mesh_assets_after_wave1": remaining_assets,
                "recommended_next_wrapped_mesh_pairs": recommended_pairs(remaining_assets),
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "promod-gru-v2-full-mesh-gap-report",
        "purpose": "Strict per-network GRU v2 cW* mesh report showing what is already live on DODO PMM and Uniswap V2, and what remains before Balancer/Curve-style venue expansion becomes meaningful.",
        "accounting_split_rule": {
            "source_balance_fraction_to_move": "75%",
            "destination_network_count": 10,
            "per_chain_fraction_of_current_source_balance": "7.5%",
            "base_unit_rule": "amount_75 = balance * 75 // 100; per_chain = amount_75 // 10; remainder stays on Chain 138 or is adjusted on the last transfer",
        },
        "repo_reality": {
            "chain138_generic_source_to_all_destinations": "not_available_as_single_generic_bridge_button",
            "supported_pattern": "destination cW deployment plus lock/mint bridge or destination-side minting through the CW bridge program where selectors, mappings, and destination receivers are actually configured",
            "venue_summary": {
                "dodo_pmm": "live cW asset pools are recorded across the public network set",
                "uniswap_v2": "live core rail is recorded in deployment-status; live wave-1 wrapped mesh completion is tracked separately in the phase-2 completion report",
                "balancer": "currently modeled as reference or planned venue surface, not a live cW asset mesh deployment set",
                "curve": "currently modeled as reference or planned venue surface, not a live cW asset mesh deployment set",
                "uniswap_v3": "currently modeled as reference surface in deployment-status gasReferenceVenues",
                "oneinch": "aggregator visibility surface only, not a pool deployment venue",
            },
        },
        "network_summary": {
            "target_chain_ids": TARGET_CHAIN_IDS,
            "dodo_pmm_live_chain_count": sum(1 for row in chain_rows if row["venue_status"]["dodo_pmm"]["live_pair_count"] > 0),
            "uniswap_v2_wave1_complete_chain_count": sum(1 for row in chain_rows if row["venue_status"]["uniswap_v2"]["wave1_pair_count"] > 0),
            "balancer_live_chain_count": balancer_live,
            "curve_live_chain_count": curve_live,
            "uniswap_v3_live_chain_count": uniswap_v3_live,
        },
        "chains": chain_rows,
        "source_artifacts": [
            "cross-chain-pmm-lps/config/deployment-status.json",
            "reports/extraction/promod-uniswap-v2-phase2-wave1-completion-status-latest.json",
            "config/token-mapping-multichain.json",
            "docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md",
        ],
    }
    write_json(REPORT, payload)
    # Render the markdown doc from the same payload so the two never diverge.
    lines = [
        "# Mr. Promod GRU v2 Full Mesh Gap Report",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: `{payload['program_name']}`",
        "- Purpose: strict per-network report for the remaining GRU v2 `cW*` mesh across DODO PMM, Uniswap V2, Balancer, Curve, and related venue surfaces.",
        "",
        "## Accounting Rule",
        "",
        f"- Source balance fraction to move: `{payload['accounting_split_rule']['source_balance_fraction_to_move']}`",
        f"- Destination network count: `{payload['accounting_split_rule']['destination_network_count']}`",
        f"- Per-chain fraction of current source balance: `{payload['accounting_split_rule']['per_chain_fraction_of_current_source_balance']}`",
        f"- Base unit rule: `{payload['accounting_split_rule']['base_unit_rule']}`",
        "",
        "## Repo Reality",
        "",
        f"- Chain 138 generic source-to-all-destinations bridge: `{payload['repo_reality']['chain138_generic_source_to_all_destinations']}`",
        f"- Supported pattern: {payload['repo_reality']['supported_pattern']}",
        "- Venue summary:",
        f"  - DODO PMM: {payload['repo_reality']['venue_summary']['dodo_pmm']}",
        f"  - Uniswap V2: {payload['repo_reality']['venue_summary']['uniswap_v2']}",
        f"  - Balancer: {payload['repo_reality']['venue_summary']['balancer']}",
        f"  - Curve: {payload['repo_reality']['venue_summary']['curve']}",
        f"  - Uniswap V3: {payload['repo_reality']['venue_summary']['uniswap_v3']}",
        f"  - 1inch: {payload['repo_reality']['venue_summary']['oneinch']}",
        "",
        "## Network Summary",
        "",
        f"- DODO PMM live chain count: `{payload['network_summary']['dodo_pmm_live_chain_count']}` / `10`",
        f"- Uniswap V2 wave-1 complete chain count: `{payload['network_summary']['uniswap_v2_wave1_complete_chain_count']}` / `10`",
        f"- Balancer live chain count: `{payload['network_summary']['balancer_live_chain_count']}` / `10`",
        f"- Curve live chain count: `{payload['network_summary']['curve_live_chain_count']}` / `10`",
        f"- Uniswap V3 live chain count: `{payload['network_summary']['uniswap_v3_live_chain_count']}` / `10`",
        "",
        "| Chain | Network | Bridge | DODO PMM | Uniswap V2 | Balancer | Curve | Remaining Mesh Assets After Wave 1 |",
        "|---|---|---|---|---|---|---|---|",
    ]
    for row in chain_rows:
        lines.append(
            f"| `{row['chain_id']}` | {row['network']} | `{row['bridge_available']}` | "
            f"`{row['venue_status']['dodo_pmm']['live_pair_count']} live` | "
            f"`{row['venue_status']['uniswap_v2']['wave1_pair_count']} wave-1 pairs` | "
            f"`{row['venue_status']['balancer']['status']}` | "
            f"`{row['venue_status']['curve']['status']}` | "
            f"{', '.join(f'`{asset}`' for asset in row['remaining_wrapped_mesh_assets_after_wave1']) or '`none`'} |"
        )
    lines.extend(["", "## Per-Network Detail", ""])
    for row in chain_rows:
        lines.append(f"### Chain `{row['chain_id']}` — {row['network']}")
        lines.append("")
        lines.append(f"- Bridge available: `{row['bridge_available']}`")
        lines.append(f"- cW assets: {', '.join(f'`{asset}`' for asset in row['cw_assets'])}")
        lines.append("")
        lines.append(f"- DODO PMM live pairs: `{row['venue_status']['dodo_pmm']['live_pair_count']}`")
        if row["venue_status"]["dodo_pmm"]["live_pairs"]:
            lines.append(f"  - {', '.join(f'`{pair}`' for pair in row['venue_status']['dodo_pmm']['live_pairs'])}")
        lines.append(f"- Uniswap V2 core pairs: `{row['venue_status']['uniswap_v2']['core_pair_count']}`")
        if row["venue_status"]["uniswap_v2"]["core_pairs"]:
            lines.append(f"  - {', '.join(f'`{pair}`' for pair in row['venue_status']['uniswap_v2']['core_pairs'])}")
        lines.append(f"- Uniswap V2 wave-1 pairs: `{row['venue_status']['uniswap_v2']['wave1_pair_count']}`")
        if row["venue_status"]["uniswap_v2"]["wave1_pairs"]:
            lines.append(f"  - {', '.join(f'`{pair}`' for pair in row['venue_status']['uniswap_v2']['wave1_pairs'])}")
        lines.append(f"- Balancer status: `{row['venue_status']['balancer']['status']}`")
        lines.append(f"- Curve status: `{row['venue_status']['curve']['status']}`")
        lines.append(f"- Uniswap V3 status: `{row['venue_status']['uniswap_v3']['status']}`")
        lines.append(f"- 1inch status: `{row['venue_status']['oneinch']['status']}`")
        lines.append(
            f"- Remaining wrapped mesh assets after wave 1: {', '.join(f'`{asset}`' for asset in row['remaining_wrapped_mesh_assets_after_wave1']) or '`none`'}"
        )
        lines.append(
            f"- Recommended next wrapped mesh pairs: {', '.join(f'`{pair}`' for pair in row['recommended_next_wrapped_mesh_pairs']) or '`none`'}"
        )
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,186 @@
#!/usr/bin/env python3
from __future__ import annotations
from pathlib import Path
import json
import time
# Repo root (two directory levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Inputs: the liquidity program report and its promotion-gate evaluation.
PROMOD_REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-liquidity-program-latest.json"
PROMOTION_GATES = ROOT / "reports" / "extraction" / "promod-uniswap-v2-promotion-gates-latest.json"
# Outputs: JSON matrix and the rendered markdown doc.
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-first-deployment-target-matrix-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_FIRST_DEPLOYMENT_TARGET_MATRIX.md"
# Core stable rails; everything else documented counts as an "other" cW token.
CORE_RAILS = {"cWUSDC", "cWUSDT", "cWAUSDT"}
def now() -> str:
    """Current UTC time as an ISO-8601 ``Z``-suffixed timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load(path: Path):
    """Parse the JSON document stored at ``path``."""
    raw = path.read_text()
    return json.loads(raw)
def write_json(path: Path, payload):
    """Serialize ``payload`` as indented JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str):
    """Write ``text`` normalized to exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def choose_first_pair(entry: dict) -> str:
    """Pick the first pool to deploy for a chain.

    Preference order: the wrapped support rails (``cWUSDT/cWUSDC``, then the
    cWAUSDT fallbacks), then the hub-matched canonical settlement rail, then
    the first listed settlement or wrapped pair, else the empty string.
    """
    settlement = entry.get("settlement_phase_pairs", [])
    wrapped = entry.get("wrapped_depth_phase_pairs", [])
    for candidate in ("cWUSDT/cWUSDC", "cWAUSDT/cWUSDT", "cWAUSDT/cWUSDC"):
        if candidate in wrapped:
            return candidate
    hub = entry.get("hub_stable")
    hub_rail = {"USDT": "cWUSDT/USDT", "USDC": "cWUSDC/USDC"}.get(hub)
    if hub_rail and hub_rail in settlement:
        return hub_rail
    if settlement:
        return settlement[0]
    return wrapped[0] if wrapped else ""
def why_first(entry: dict, pair: str) -> str:
    """One-sentence rationale for the selected first pair on this chain."""
    documented = set(entry.get("documented_cw_tokens", []))
    if pair == "cWUSDT/cWUSDC":
        if "cWAUSDT" in documented:
            return "Core wrapped support rail and the one we are standardizing on first; it creates transferable depth between the two settlement-adjacent cW rails before any canonical stable exit pool."
        return "Core wrapped support rail and the fixed first deployment target where cWUSDT and cWUSDC are already documented."
    fixed_reasons = {
        "cWAUSDT/cWUSDT": "Fallback wrapped-first rail when cWUSDT/cWUSDC is unavailable but cWAUSDT and cWUSDT are both documented.",
        "cWAUSDT/cWUSDC": "Fallback wrapped-first rail when cWUSDT/cWUSDC is unavailable but cWAUSDT and cWUSDC are both documented.",
    }
    if pair in fixed_reasons:
        return fixed_reasons[pair]
    hub = entry["hub_stable"]
    if pair == "cWUSDT/USDT" and hub == "USDT":
        return "Fallback canonical settlement rail only for a USDT-hub chain that cannot yet open with cWUSDT/cWUSDC."
    if pair == "cWUSDC/USDC" and hub == "USDC":
        return "Fallback canonical settlement rail only for a USDC-hub chain that cannot yet open with cWUSDT/cWUSDC."
    if pair.startswith("cWAUSDT/"):
        return "cWAUSDT is documented on this chain, but the first pool should still be a support or settlement rail before cWAUSDT expansion."
    return "First deployable rail selected from the current program ordering."
def post_deploy_commands(chain_id: int) -> list[str]:
    """Fixed post-deployment verification pipeline (identical for all chains).

    ``chain_id`` is accepted for interface symmetry; it does not alter the
    command list today.
    """
    commands = [
        "bash scripts/verify/build-promod-uniswap-v2-live-pair-discovery.sh",
        "python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py --write-discovered",
        "node cross-chain-pmm-lps/scripts/validate-deployment-status.cjs cross-chain-pmm-lps/config/deployment-status.json",
        "bash scripts/verify/build-promod-uniswap-v2-promotion-gates.sh",
    ]
    return commands
def build_next_expansion_pairs(entry: dict) -> list[str]:
    """Pair every documented non-core cW token against the documented core stables.

    For each extra token, ``token/cWUSDC`` precedes ``token/cWUSDT``, and a
    stable leg is only emitted when that stable is itself documented.
    """
    documented = entry.get("documented_cw_tokens", [])
    stables = [stable for stable in ("cWUSDC", "cWUSDT") if stable in documented]
    return [
        f"{token}/{stable}"
        for token in documented
        if token not in CORE_RAILS
        for stable in stables
    ]
def main():
    """Build the per-chain first-deployment target matrix (JSON + markdown).

    Joins the liquidity-program report with the promotion-gate evaluation,
    picks a first pair per chain via choose_first_pair(), and writes REPORT
    and DOC.
    """
    promod = load(PROMOD_REPORT)
    gates = load(PROMOTION_GATES)
    gates_by_chain = {entry["chain_id"]: entry for entry in gates["entries"]}
    entries = []
    for entry in promod["entries"]:
        chain_id = entry["chain_id"]
        first_pair = choose_first_pair(entry)
        token_a, token_b = first_pair.split("/") if first_pair else ("", "")
        gate = gates_by_chain.get(chain_id, {})
        # Drop the cWAUSDT-documentation blocker: it is informational here,
        # not a live blocker for the chosen first pair.
        remaining = [
            blocker
            for blocker in gate.get("blocking_items", [])
            if "cWAUSDT is not currently documented" not in blocker
        ]
        additional_tokens = [token for token in entry.get("documented_cw_tokens", []) if token not in CORE_RAILS]
        next_pairs = build_next_expansion_pairs(entry)
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "tier": entry["tier"],
                "hub_stable": entry["hub_stable"],
                "first_pair": first_pair,
                "why_it_should_be_first": why_first(entry, first_pair),
                "required_tokens": [token_a, token_b],
                "other_gru_v2_cw_tokens": additional_tokens,
                "next_wrapped_expansion_pairs": next_pairs,
                # Canonical USDC/USDT legs count as present even though they
                # are not cW tokens.
                "required_token_addresses_present": [token_a in entry.get("documented_cw_tokens", []) or token_a in {"USDC", "USDT"},
                    token_b in entry.get("documented_cw_tokens", []) or token_b in {"USDC", "USDT"}],
                "chain_ready_for_deploy_prep": gate.get("env_values_present", False) and gate.get("promotion_gate", {}).get("indexer_support_present", False),
                "remaining_live_blockers": remaining,
                "post_deploy_commands": post_deploy_commands(chain_id),
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": promod["program_name"],
        "purpose": "Exact first-pair deployment target matrix for Mr. Promod's Uniswap V2 rollout.",
        "mainnet_funding_posture": promod["mainnet_funding_posture"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-liquidity-program-latest.json",
            "reports/extraction/promod-uniswap-v2-promotion-gates-latest.json",
        ],
    }
    write_json(REPORT, payload)
    # Render the markdown matrix from the same payload.
    lines = [
        "# Mr. Promod Uniswap V2 First Deployment Target Matrix",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: {payload['program_name']}",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        "- Purpose: choose the first pair per chain so the first live pool rollout is concrete rather than generic.",
        "",
        "| Chain | Network | First Pair | Why It Should Be First | Required Tokens | Other GRU v2 cW* Tokens | Next Wrapped Expansion Pairs | Post-Deploy Commands |",
        "|---|---|---|---|---|---|---|---|",
    ]
    for entry in entries:
        commands = "<br>".join(f"`{cmd}`" for cmd in entry["post_deploy_commands"])
        required = ", ".join(f"`{token}`" for token in entry["required_tokens"] if token)
        extras = ", ".join(f"`{token}`" for token in entry["other_gru_v2_cw_tokens"]) or ""
        next_pairs = ", ".join(f"`{pair}`" for pair in entry["next_wrapped_expansion_pairs"][:8]) or ""
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['first_pair']}` | {entry['why_it_should_be_first']} | {required} | {extras} | {next_pairs} | {commands} |"
        )
    lines.extend(
        [
            "",
            "## Selection Rule",
            "",
            "- First choice is always `cWUSDT/cWUSDC` when both rails are documented on the chain.",
            "- If `cWUSDT/cWUSDC` is unavailable, prefer a wrapped-first fallback before opening a canonical settlement rail.",
            "- The other documented GRU v2 `cW*` assets expand next as wrapped pairs against `cWUSDC` and `cWUSDT`.",
            "- Canonical settlement rails such as `cWUSDC/USDC` or `cWUSDT/USDT` become the first target only when the chain cannot open with the wrapped support rail.",
            "- `cWAUSDT` remains a wrapped-depth support asset, not the default first target, unless it is required as the only viable wrapped-first fallback.",
        ]
    )
    write_text(DOC, "\n".join(lines))
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,203 @@
#!/usr/bin/env python3
from pathlib import Path
import json
import time
# Repo root (two directory levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Inputs: pool matrix, deployment registry, and the program policy config.
POOL_MATRIX = ROOT / "cross-chain-pmm-lps" / "config" / "pool-matrix.json"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
POLICY = ROOT / "config" / "extraction" / "promod-uniswap-v2-liquidity-policy.json"
# Outputs: JSON program report and the rendered markdown doc.
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-liquidity-program-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_LIQUIDITY_PROGRAM.md"
def load(path: Path):
    """Parse the JSON document stored at ``path``."""
    raw = path.read_text()
    return json.loads(raw)
def write_json(path: Path, payload):
    """Serialize ``payload`` as indented JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str):
    """Write ``text`` normalized to exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def now():
    """Current UTC time as an ISO-8601 ``Z``-suffixed timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def tier_for(chain_id: str, tiers: dict) -> str:
    """Name of the first tier whose chain-id list contains ``chain_id``.

    Falls back to ``"unassigned"`` when no tier lists the chain.
    """
    matches = (name for name, chain_ids in tiers.items() if chain_id in chain_ids)
    return next(matches, "unassigned")
# Core stable rails; every other cW symbol is an "additional" GRU v2 asset.
CORE_RAILS = {"cWUSDC", "cWUSDT", "cWAUSDT"}
def build_additional_gru_pairs(cw_symbols: list[str]) -> list[str]:
    """Pair each non-core cW symbol against whichever core stables are documented.

    For each extra symbol, ``symbol/cWUSDC`` precedes ``symbol/cWUSDT``; a
    stable leg is only emitted when that stable appears in ``cw_symbols``.
    """
    stables = [stable for stable in ("cWUSDC", "cWUSDT") if stable in cw_symbols]
    return [
        f"{symbol}/{stable}"
        for symbol in cw_symbols
        if symbol not in CORE_RAILS
        for stable in stables
    ]
def main():
    """Build the Uniswap V2 liquidity program report (JSON + markdown).

    Walks every bridge-available chain in the pool matrix, derives wrapped
    and settlement pair plans from the documented cW token set and policy
    tiers, and writes REPORT and DOC.
    """
    matrix = load(POOL_MATRIX)
    status = load(DEPLOYMENT_STATUS)
    policy = load(POLICY)
    entries = []
    tier_counts = {}
    # Matrix keys are decimal strings; sort numerically for stable output.
    for chain_id in sorted(matrix["chains"].keys(), key=lambda c: int(c)):
        chain_matrix = matrix["chains"][chain_id]
        chain_status = status["chains"].get(chain_id)
        # Chains without a recorded bridge are out of scope for this program.
        if not chain_status or not chain_status.get("bridgeAvailable"):
            continue
        cw_tokens = chain_status.get("cwTokens", {})
        cw_symbols = sorted(cw_tokens.keys())
        additional_gru_tokens = [symbol for symbol in cw_symbols if symbol not in CORE_RAILS]
        hub = chain_matrix.get("hubStable")
        pools_first = chain_matrix.get("poolsFirst", [])
        existing_pairs = sorted(
            {
                f"{pool.get('base')}/{pool.get('quote')}"
                for pool in (chain_status.get("pmmPools") or [])
                if pool.get("base") and pool.get("quote")
            }
        )
        # Wrapped-depth phase: only pairs whose both legs are documented.
        wrapped_pairs = []
        if "cWAUSDT" in cw_tokens and "cWUSDC" in cw_tokens:
            wrapped_pairs.append("cWAUSDT/cWUSDC")
        if "cWAUSDT" in cw_tokens and "cWUSDT" in cw_tokens:
            wrapped_pairs.append("cWAUSDT/cWUSDT")
        if "cWUSDT" in cw_tokens and "cWUSDC" in cw_tokens:
            wrapped_pairs.append("cWUSDT/cWUSDC")
        additional_wrapped_pairs = build_additional_gru_pairs(cw_symbols)
        # Settlement phase: cW-vs-hub rails already listed in poolsFirst.
        settlement_candidates = []
        for symbol in ("cWUSDC", "cWUSDT", "cWAUSDT"):
            pair = f"{symbol}/{hub}"
            if symbol in cw_tokens and pair in pools_first:
                settlement_candidates.append(pair)
        blockers = [
            "Uniswap V2 factory/router addresses are not documented in-repo for this chain; env-backed deployment or factory mapping is still required.",
            "New Uniswap V2 pools must be added to token-aggregation indexing and MCP/API visibility before promotion."
        ]
        if "cWAUSDT" not in cw_tokens:
            blockers.append("cWAUSDT is not currently documented on this chain, so wrapped-depth phase is limited to cWUSDT/cWUSDC.")
        if not settlement_candidates:
            blockers.append("No canonical settlement candidate from the current pool-matrix matched the documented cW token set.")
        tier = tier_for(chain_id, policy["priority_tiers"])
        tier_counts[tier] = tier_counts.get(tier, 0) + 1
        entries.append(
            {
                "chain_id": int(chain_id),
                "network": chain_matrix.get("name"),
                "tier": tier,
                "hub_stable": hub,
                "bridge_available": True,
                "documented_cw_tokens": cw_symbols,
                "additional_gru_v2_cw_tokens": additional_gru_tokens,
                "wrapped_depth_phase_pairs": wrapped_pairs,
                "additional_wrapped_depth_pairs": additional_wrapped_pairs,
                "settlement_phase_pairs": settlement_candidates,
                "existing_pmm_analogs": [
                    pair
                    for pair in existing_pairs
                    if pair in wrapped_pairs or pair in settlement_candidates or pair in additional_wrapped_pairs
                ],
                "uniswap_v2_deployment_status": "planned",
                "required_uniswap_v2_env_suffixes": policy["uniswap_v2_requirements"]["required_env_suffixes"],
                "blockers": blockers
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": policy["program_name"],
        "purpose": policy["purpose"],
        "operator_rule": policy["operator_rule"],
        "mainnet_funding_posture": policy["mainnet_funding_posture"],
        "wrapped_depth_phase": policy["wrapped_depth_phase"],
        "settlement_phase": policy["settlement_phase"],
        "priority_tiers": policy["priority_tiers"],
        "tier_counts": tier_counts,
        "registry_sources": [
            "cross-chain-pmm-lps/config/pool-matrix.json",
            "cross-chain-pmm-lps/config/deployment-status.json",
            "config/extraction/promod-uniswap-v2-liquidity-policy.json"
        ],
        "entries": entries
    }
    write_json(REPORT, payload)
    # Render the markdown doc from the same payload.
    lines = [
        "# Mr. Promod Uniswap V2 Liquidity Program",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: {payload['program_name']}",
        "- Strict note: this is a repo-native desired-state rollout package for Uniswap V2 or Uniswap-V2-compatible pools. It does not claim live deployment unless factory/router addresses and pool addresses are recorded.",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        "",
        "## Deployment Model",
        "",
        "- Phase 1: build wrapped-depth support first when canonical USDC or USDT is scarce.",
        "- Phase 2: add or deepen canonical settlement rails once enough hub-stable inventory exists.",
        "- Promotion gate: do not treat any Uniswap V2 rail as live until its factory/router env, pool address, and indexer visibility are all recorded.",
        "",
        "## Wrapped-Depth Policy",
        "",
        f"- Preferred wrapped pairs: `{', '.join(payload['wrapped_depth_phase']['preferred_pairs_in_order'])}`",
        f"- Allocation split: flagship `{payload['wrapped_depth_phase']['allocation_share_pct']['flagship_pair']}%`, second `{payload['wrapped_depth_phase']['allocation_share_pct']['second_pair']}%`, third `{payload['wrapped_depth_phase']['allocation_share_pct']['third_pair']}%`",
        "",
        "## Operator Matrix",
        "",
        "| Chain | Network | Tier | Hub Stable | Core Wrapped Pairs | Additional GRU v2 cW* Tokens | Next Wrapped Expansion Pairs | Settlement Phase Pairs | Existing PMM Analogs |",
        "|---|---|---|---|---|---|---|---|---|",
    ]
    for entry in entries:
        wrapped = ", ".join(f"`{pair}`" for pair in entry["wrapped_depth_phase_pairs"]) or ""
        extra_tokens = ", ".join(f"`{token}`" for token in entry["additional_gru_v2_cw_tokens"]) or ""
        extra_pairs = ", ".join(f"`{pair}`" for pair in entry["additional_wrapped_depth_pairs"][:8]) or ""
        settlement = ", ".join(f"`{pair}`" for pair in entry["settlement_phase_pairs"]) or ""
        analogs = ", ".join(f"`{pair}`" for pair in entry["existing_pmm_analogs"]) or ""
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['tier']}` | `{entry['hub_stable']}` | {wrapped} | {extra_tokens} | {extra_pairs} | {settlement} | {analogs} |"
        )
    lines.extend(
        [
            "",
            "## Readiness Rules",
            "",
            "- Use `cWAUSDT` as a wrapped-depth support asset, not as a full replacement for `USDC` or `USDT` settlement.",
            "- Keep `cWUSDT/cWUSDC` available everywhere both core wrapped rails are documented.",
            "- After the first rail is live, expand the other documented GRU v2 `cW*` assets against `cWUSDC` and `cWUSDT` before opening broad canonical settlement exits.",
            "- Prefer `cWUSDC/USDC`, `cWUSDT/USDT`, or `cWAUSDT/USDC|USDT` only after canonical hub inventory is deep enough to matter.",
            "- For every promoted chain, add the new pools to `deployment-status.json`, Uniswap V2 env config, and token-aggregation indexing.",
        ]
    )
    write_text(DOC, "\n".join(lines))
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,299 @@
#!/usr/bin/env python3
from __future__ import annotations
from pathlib import Path
import argparse
from decimal import Decimal, getcontext
import json
import re
import subprocess
import time
getcontext().prec = 42
ROOT = Path(__file__).resolve().parents[2]
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
ENV_PATH = ROOT / "smom-dbis-138" / ".env"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_LIVE_PAIR_DISCOVERY.md"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
HEALTHY_DEVIATION_BPS = Decimal("25")
MIN_HEALTHY_RESERVE_UNITS = Decimal("1000")
UINT_RE = re.compile(r"\b\d+\b")
CHAIN_CONFIG = {
"1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"], "hub": "USDC"},
"10": {"rpc_keys": ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"], "hub": "USDC"},
"25": {"rpc_keys": ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"], "hub": "USDT"},
"56": {"rpc_keys": ["BSC_RPC_URL", "BSC_MAINNET_RPC"], "hub": "USDT"},
"100": {"rpc_keys": ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"], "hub": "USDC"},
"137": {"rpc_keys": ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"], "hub": "USDC"},
"42220": {"rpc_keys": ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"], "hub": "USDC"},
"43114": {"rpc_keys": ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"], "hub": "USDC"},
"8453": {"rpc_keys": ["BASE_RPC_URL", "BASE_MAINNET_RPC"], "hub": "USDC"},
"42161": {"rpc_keys": ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"], "hub": "USDC"},
}
def now() -> str:
    """Current UTC time as an ISO-8601 'Z' timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load_json(path: Path):
    """Parse and return the JSON document stored at *path*."""
    return json.loads(path.read_text())
def write_json(path: Path, payload):
    """Write *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str):
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def load_env(path: Path):
    """Parse a .env-style file into a {KEY: VALUE} dict.

    Blank lines, comment lines, and lines without '=' are skipped; only the
    first '=' splits key from value, and both sides are whitespace-stripped.
    """
    parsed = {}
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        parsed[key.strip()] = value.strip()
    return parsed
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
if seen is None:
seen = set()
if key in seen:
return env_values.get(key, "")
seen.add(key)
value = env_values.get(key, "")
if value.startswith("${") and value.endswith("}"):
inner = value[2:-1]
target = inner.split(":-", 1)[0]
return resolve_env_value(target, env_values, seen)
return value
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run `cast call` (Foundry) against *target* and return trimmed stdout."""
    command = ["cast", "call", target, signature]
    command.extend(args)
    command.extend(["--rpc-url", rpc_url])
    return subprocess.check_output(command, text=True).strip()
def parse_uint(value: str) -> int:
    """Return the first standalone unsigned integer found in cast output."""
    match = UINT_RE.search(value)
    if match is None:
        raise ValueError(f"could not parse integer from {value!r}")
    return int(match.group(0))
def parse_uints(value: str, count: int) -> list[int]:
    """Return the first *count* standalone unsigned integers from cast output."""
    numbers = [int(text) for text in UINT_RE.findall(value)]
    if len(numbers) < count:
        raise ValueError(f"expected {count} integers, got {value!r}")
    return numbers[:count]
def parse_address(value: str) -> str:
    """Return the first 0x-prefixed 20-byte hex address found in cast output."""
    found = re.search(r"0x[a-fA-F0-9]{40}", value)
    if found is None:
        raise ValueError(f"could not parse address from {value!r}")
    return found.group(0)
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Scale an integer raw token amount down by 10**decimals into whole units."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
def compute_pair_health(rpc_url: str, pair_address: str, base_address: str, quote_address: str) -> dict:
    """Probe a live Uniswap V2 pair over RPC and score its depth and peg parity.

    Reads token0/token1, reserves, and per-token decimals via `cast call`,
    orients the reserves so *base_address* maps to the base side, then checks:
    - depth: both sides hold at least MIN_HEALTHY_RESERVE_UNITS whole units;
    - parity: quote/base price deviates from 1 by at most HEALTHY_DEVIATION_BPS.

    Raises ValueError when the pair's tokens do not match base/quote.
    Numeric fields in the returned dict are stringified for JSON stability.
    """
    token0 = parse_address(cast_call(rpc_url, pair_address, "token0()(address)"))
    token1 = parse_address(cast_call(rpc_url, pair_address, "token1()(address)"))
    # getReserves also returns blockTimestampLast (uint32), which is discarded.
    reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3)
    decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
    decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
    # Uniswap V2 orders token0/token1 by address, so map reserves back to the
    # caller's base/quote orientation (case-insensitive address comparison).
    if token0.lower() == base_address.lower() and token1.lower() == quote_address.lower():
        base_raw, quote_raw = reserve0_raw, reserve1_raw
        base_decimals, quote_decimals = decimals0, decimals1
    elif token1.lower() == base_address.lower() and token0.lower() == quote_address.lower():
        base_raw, quote_raw = reserve1_raw, reserve0_raw
        base_decimals, quote_decimals = decimals1, decimals0
    else:
        raise ValueError(f"pair tokens {token0}/{token1} do not match {base_address}/{quote_address}")
    base_units = normalize_units(base_raw, base_decimals)
    quote_units = normalize_units(quote_raw, quote_decimals)
    # Guard against division by zero on an empty pool.
    price = Decimal(0) if base_units == 0 else quote_units / base_units
    # Distance from the 1:1 stable peg, in basis points.
    deviation_bps = abs(price - Decimal(1)) * Decimal(10000)
    depth_ok = base_units >= MIN_HEALTHY_RESERVE_UNITS and quote_units >= MIN_HEALTHY_RESERVE_UNITS
    parity_ok = deviation_bps <= HEALTHY_DEVIATION_BPS
    return {
        "baseReserveRaw": str(base_raw),
        "quoteReserveRaw": str(quote_raw),
        "baseReserveUnits": str(base_units),
        "quoteReserveUnits": str(quote_units),
        "priceQuotePerBase": str(price),
        "deviationBps": str(deviation_bps),
        "depthOk": depth_ok,
        "parityOk": parity_ok,
        "healthy": depth_ok and parity_ok,
    }
def candidate_pairs(chain: dict) -> list[tuple[str, str, str, str]]:
    """Build ordered (base, quote, base_addr, quote_addr) candidates for a chain.

    Hub-quoted pairs come first (quote is the USDC anchor when present, else
    USDT), followed by the cW*-vs-cW* cross pairs. Only pairs whose addresses
    are all known are emitted.
    """
    cw = chain.get("cwTokens", {})
    anchors = chain.get("anchorAddresses", {})
    hub = "USDC" if "USDC" in anchors else "USDT"
    pairs: list[tuple[str, str, str, str]] = []
    for base in ("cWUSDC", "cWUSDT", "cWAUSDT"):
        if base in cw and hub in anchors:
            pairs.append((base, hub, cw[base], anchors[hub]))
    crosses = (("cWUSDT", "cWUSDC"), ("cWAUSDT", "cWUSDT"), ("cWAUSDT", "cWUSDC"))
    for base, quote in crosses:
        if base in cw and quote in cw:
            pairs.append((base, quote, cw[base], cw[quote]))
    return pairs
def append_discovered_pair(status: dict, chain_id: str, pair: dict):
    """Append *pair* to the chain's uniswapV2Pools list unless already present.

    Duplicate detection compares poolAddress case-insensitively. Returns True
    when the row was added, False when it was a duplicate.
    """
    pools = status["chains"][chain_id].setdefault("uniswapV2Pools", [])
    wanted = pair["poolAddress"].lower()
    for existing in pools:
        if str(existing.get("poolAddress", "")).lower() == wanted:
            return False
    pools.append(pair)
    return True
def main():
    """Discover live cW* Uniswap V2 pairs per configured chain via factory
    getPair() calls, score their health, optionally persist discoveries into
    deployment-status.json, and emit the JSON report plus markdown doc.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--write-discovered", action="store_true", help="Write discovered live pairs into deployment-status.json under uniswapV2Pools.")
    args = parser.parse_args()
    status = load_json(DEPLOYMENT_STATUS)
    env_values = load_env(ENV_PATH)
    entries = []
    discovered_for_write = []
    for chain_id, config in CHAIN_CONFIG.items():
        # Skip chains configured here but absent from deployment-status.json.
        chain = status["chains"].get(chain_id)
        if not chain:
            continue
        factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
        router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
        start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
        # First non-empty RPC env key wins, in the configured preference order.
        rpc_url = ""
        for key in config["rpc_keys"]:
            value = resolve_env_value(key, env_values)
            if value:
                rpc_url = value
                break
        env_ready = bool(factory and router and rpc_url)
        pairs = []
        if env_ready:
            for base, quote, token0, token1 in candidate_pairs(chain):
                # RPC failures are recorded inline rather than aborting the scan.
                try:
                    pair_address = cast_call(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
                except Exception as exc:
                    pair_address = f"ERROR:{exc}"
                # A pair is live when the factory returns a non-zero address.
                live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
                row = {
                    "base": base,
                    "quote": quote,
                    "poolAddress": pair_address,
                    "live": live,
                }
                if live:
                    try:
                        row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
                    except Exception as exc:
                        row["health"] = {"healthy": False, "error": str(exc)}
                pairs.append(row)
                if live:
                    discovered_for_write.append(
                        {
                            "chain_id": chain_id,
                            "row": {
                                "base": base,
                                "quote": quote,
                                "poolAddress": pair_address,
                                "factoryAddress": factory,
                                "routerAddress": router,
                                "startBlock": int(start_block),
                                "venue": "uniswap_v2_pair",
                                "publicRoutingEnabled": False,
                            },
                        }
                    )
        entries.append(
            {
                "chain_id": int(chain_id),
                "network": chain.get("name"),
                "factoryAddress": factory or None,
                "routerAddress": router or None,
                "startBlock": int(start_block),
                "rpcConfigured": bool(rpc_url),
                "envReady": env_ready,
                "pairsChecked": pairs,
            }
        )
    # Persist newly discovered live pairs only when explicitly requested.
    writes = []
    if args.write_discovered:
        for item in discovered_for_write:
            changed = append_discovered_pair(status, item["chain_id"], item["row"])
            if changed:
                writes.append(item["row"])
        if writes:
            DEPLOYMENT_STATUS.write_text(json.dumps(status, indent=2) + "\n")
    payload = {
        "generated_at": now(),
        "write_discovered": args.write_discovered,
        "discovered_live_pair_count": len(discovered_for_write),
        "healthy_live_pair_count": sum(
            1 for entry in entries for row in entry["pairsChecked"] if row.get("health", {}).get("healthy") is True
        ),
        "writes_applied": writes,
        "entries": entries,
    }
    write_json(REPORT, payload)
    # Companion markdown summary table, one row per chain.
    lines = [
        "# Mr. Promod Uniswap V2 Live Pair Discovery",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Live pairs discovered: `{payload['discovered_live_pair_count']}`",
        f"- Healthy live pairs: `{payload['healthy_live_pair_count']}`",
        f"- Write mode: `{payload['write_discovered']}`",
        "",
        "| Chain | Network | Env Ready | Live Pairs Found | Healthy Live Pairs |",
        "|---|---|---|---|---|",
    ]
    for entry in entries:
        live_pairs = [f"`{row['base']}/{row['quote']}`" for row in entry["pairsChecked"] if row["live"]]
        healthy_pairs = [
            f"`{row['base']}/{row['quote']}`"
            for row in entry["pairsChecked"]
            if row.get("health", {}).get("healthy") is True
        ]
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['envReady']}` | {', '.join(live_pairs) if live_pairs else ''} | {', '.join(healthy_pairs) if healthy_pairs else ''} |"
        )
    write_text(DOC, "\n".join(lines))
    print(REPORT)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,125 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from decimal import Decimal
from pathlib import Path
ROOT = Path(__file__).resolve().parents[2]
BASE_MATRIX = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-bridge-from-138-matrix-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-bridge-3x-matrix-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_BRIDGE_3X_MATRIX.md"
MULTIPLIER = Decimal("3")
def now() -> str:
    """Current UTC time as an ISO-8601 'Z' timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load(path: Path):
    """Parse and return the JSON document stored at *path*."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str) -> None:
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def fmt(v: Decimal) -> str:
    """Render a Decimal in plain (non-exponent) form with trailing zeros trimmed."""
    if v == 0:
        return format(Decimal("0"), "f")
    return format(v.normalize(), "f")
def main() -> None:
    """Triple every per-chain cUSDT/cUSDC funding need from the base bridge
    matrix, total the results, and emit the 3x JSON report plus markdown doc.
    """
    base = load(BASE_MATRIX)
    total_usdt = Decimal("0")
    total_usdc = Decimal("0")
    entries = []
    for entry in base["entries"]:
        needed_usdt = Decimal(entry["cUSDT_needed_from_138"])
        needed_usdc = Decimal(entry["cUSDC_needed_from_138"])
        # Strict 3x sizing of the base need (MULTIPLIER == 3).
        bridged_usdt = needed_usdt * MULTIPLIER
        bridged_usdc = needed_usdc * MULTIPLIER
        total_usdt += bridged_usdt
        total_usdc += bridged_usdc
        entries.append(
            {
                "chain_id": entry["chain_id"],
                "network": entry["network"],
                "phase_1_pair": entry["phase_1_pair"],
                "base_cUSDT_needed": entry["cUSDT_needed_from_138"],
                "base_cUSDC_needed": entry["cUSDC_needed_from_138"],
                "bridge_multiplier": "3",
                "cUSDT_to_bridge": fmt(bridged_usdt),
                "cUSDC_to_bridge": fmt(bridged_usdc),
                "bridge_config_env": entry["bridge_config_env"],
                "bridge_config_present": entry["bridge_config_present"],
                "recommended_action": entry["recommended_action"],
                "bridge_note": entry["bridge_note"],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 bridge 3x matrix",
        "purpose": "Strict triple-sized Chain 138 bridge funding plan for phase-1 cWUSDT/cWUSDC rollout.",
        "bridge_multiplier": "3",
        "totals_to_bridge_from_138": {
            "cUSDT": fmt(total_usdt),
            "cUSDC": fmt(total_usdc),
        },
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-bridge-from-138-matrix-latest.json",
        ],
    }
    write_json(REPORT, payload)
    # Companion markdown: summary table then per-chain notes.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Bridge 3x Matrix",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: strict triple-sized Chain 138 bridge funding plan for the remaining phase-1 pool rollout.",
        f"- Bridge multiplier: `{payload['bridge_multiplier']}x`",
        f"- Total cUSDT to bridge from Chain 138: `{payload['totals_to_bridge_from_138']['cUSDT']}`",
        f"- Total cUSDC to bridge from Chain 138: `{payload['totals_to_bridge_from_138']['cUSDC']}`",
        "",
        "| Chain | Network | Base cUSDT Need | Base cUSDC Need | 3x cUSDT To Bridge | 3x cUSDC To Bridge | Bridge Env |",
        "|---|---|---:|---:|---:|---:|---|",
    ]
    for entry in entries:
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | "
            f"`{entry['base_cUSDT_needed']}` | "
            f"`{entry['base_cUSDC_needed']}` | "
            f"`{entry['cUSDT_to_bridge']}` | "
            f"`{entry['cUSDC_to_bridge']}` | "
            f"`{entry['bridge_config_env']}` |"
        )
    lines.extend(["", "## Notes", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Recommended action: `{entry['recommended_action']}`")
        lines.append(f"- 3x cUSDT to bridge: `{entry['cUSDT_to_bridge']}`")
        lines.append(f"- 3x cUSDC to bridge: `{entry['cUSDC_to_bridge']}`")
        lines.append(f"- Bridge env: `{entry['bridge_config_env']}`")
        lines.append(f"- Bridge present: `{str(entry['bridge_config_present']).lower()}`")
        lines.append(f"- Bridge note: {entry['bridge_note']}")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from pathlib import Path
ROOT = Path(__file__).resolve().parents[2]
BRIDGE_3X = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-bridge-3x-matrix-latest.json"
FUNDING_BUNDLE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-bundle-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-bridge-3x-operator-packet-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_BRIDGE_3X_OPERATOR_PACKET.md"
def now() -> str:
    """Current UTC time as an ISO-8601 'Z' timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load(path: Path):
    """Parse and return the JSON document stored at *path*."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str) -> None:
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def synthesize_deploy_block(bundle_entry: dict) -> str:
    """Return a bash block that creates (if missing) and seeds the cWUSDT/cWUSDC pair.

    If the funding bundle entry already carries an exact block
    (`exact_post_funding_deploy_block`), it is returned verbatim. Otherwise a
    block is synthesized from the entry's factory/router/token addresses and
    recommended raw amount: env exports, conditional createPair, router
    approvals for both tokens, addLiquidity, then discovery/gates/status
    refresh commands.
    """
    if bundle_entry.get("exact_post_funding_deploy_block"):
        return bundle_entry["exact_post_funding_deploy_block"]
    token_addresses = bundle_entry["token_addresses"]
    cwusdt = token_addresses["cWUSDT"]
    cwusdc = token_addresses["cWUSDC"]
    rpc_var = bundle_entry["rpc_env_key"]
    # Default of 3_000 tokens at 6 decimals when no amount was recommended.
    amount_raw = bundle_entry.get("recommended_post_funding_amount_raw", "3000000000")
    factory = bundle_entry["factory"]
    router = bundle_entry["router"]
    lines = [
        "source smom-dbis-138/scripts/load-env.sh >/dev/null",
        f'export RPC_URL="${{{rpc_var}}}"',
        f'export FACTORY="{factory}"',
        f'export ROUTER="{router}"',
        f'export CWUSDT="{cwusdt}"',
        f'export CWUSDC="{cwusdc}"',
        'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
        f'export AMOUNT_RAW="{amount_raw}"',
        'export DEADLINE="$(( $(date +%s) + 3600 ))"',
        "",
        'PAIR="$(cast call "$FACTORY" \'getPair(address,address)(address)\' "$CWUSDT" "$CWUSDC" --rpc-url "$RPC_URL")"',
        'if [[ "$PAIR" == "0x0000000000000000000000000000000000000000" ]]; then',
        '  cast send "$FACTORY" \'createPair(address,address)(address)\' "$CWUSDT" "$CWUSDC" \\',
        '    --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "fi",
        "",
        'cast send "$CWUSDT" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_RAW" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        'cast send "$CWUSDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_RAW" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        'cast send "$ROUTER" \\',
        "  'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)' \\",
        '  "$CWUSDT" "$CWUSDC" "$AMOUNT_RAW" "$AMOUNT_RAW" "$AMOUNT_RAW" "$AMOUNT_RAW" "$SIGNER" "$DEADLINE" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        "python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py --write-discovered",
        "bash scripts/verify/build-promod-uniswap-v2-promotion-gates.sh",
        "node cross-chain-pmm-lps/scripts/validate-deployment-status.cjs cross-chain-pmm-lps/config/deployment-status.json",
    ]
    return "\n".join(lines)
def recommended_amount_raw(entry: dict) -> str:
    """Return the recommended seed amount in 6-decimal raw units.

    Takes the smaller of the two 3x bridge legs (or the non-zero one when a
    leg is "0"), scaled by 10**6. Uses Decimal instead of float so amounts
    like "8.000001" convert exactly (float scaling truncated the last unit).
    Fractions beyond 6 decimals are truncated, matching the previous int()
    behavior.
    """
    # Local import keeps this fix self-contained in a script that otherwise
    # does not use decimal.
    from decimal import Decimal

    def to_raw(amount: str) -> int:
        return int(Decimal(amount) * 1_000_000)

    usdt = entry["cUSDT_to_bridge"]
    usdc = entry["cUSDC_to_bridge"]
    if usdt == "0" and usdc == "0":
        return "0"
    if usdt == "0":
        return str(to_raw(usdc))
    if usdc == "0":
        return str(to_raw(usdt))
    return str(min(to_raw(usdt), to_raw(usdc)))
def main() -> None:
    """Join the 3x bridge matrix with the funding bundle per chain, attach a
    post-funding deploy block for each, and emit the operator-packet JSON
    report plus markdown doc.
    """
    bridge = load(BRIDGE_3X)
    bundle = load(FUNDING_BUNDLE)
    bundle_by_chain = {entry["chain_id"]: entry for entry in bundle["entries"]}
    entries = []
    for entry in bridge["entries"]:
        chain_id = entry["chain_id"]
        # Copy so the recommended amount annotation does not mutate the bundle.
        bundle_entry = dict(bundle_by_chain[chain_id])
        bundle_entry["recommended_post_funding_amount_raw"] = recommended_amount_raw(entry)
        deploy_block = synthesize_deploy_block(bundle_entry)
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "phase_1_pair": bundle_entry["phase_1_pair"],
                "cUSDT_to_bridge_3x": entry["cUSDT_to_bridge"],
                "cUSDC_to_bridge_3x": entry["cUSDC_to_bridge"],
                "destination_cw_addresses": bundle_entry["token_addresses"],
                "cw_bridge_env": entry["bridge_config_env"],
                "bridge_config_present": entry["bridge_config_present"],
                "recommended_action": entry["recommended_action"],
                "post_funding_deploy_block": deploy_block,
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 bridge 3x operator packet",
        "purpose": "Strict operator packet: chain -> 3x cUSDT/cUSDC amount -> destination cW addresses -> CW_BRIDGE env -> post-funding deploy block.",
        "bridge_multiplier": bridge["bridge_multiplier"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-bridge-3x-matrix-latest.json",
            "reports/extraction/promod-uniswap-v2-phase1-funding-bundle-latest.json",
        ],
    }
    write_json(REPORT, payload)
    # Companion markdown: summary table then per-chain deploy blocks.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Bridge 3x Operator Packet",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: strict operator packet for `3x` bridge sizing and post-funding pool deployment.",
        f"- Bridge multiplier: `{payload['bridge_multiplier']}x`",
        "",
        "| Chain | Network | 3x cUSDT | 3x cUSDC | Destination cW Addresses | CW_BRIDGE Env |",
        "|---|---|---:|---:|---|---|",
    ]
    for entry in entries:
        addrs = ", ".join(f"`{k}={v}`" for k, v in entry["destination_cw_addresses"].items())
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | "
            f"`{entry['cUSDT_to_bridge_3x']}` | "
            f"`{entry['cUSDC_to_bridge_3x']}` | "
            f"{addrs} | "
            f"`{entry['cw_bridge_env']}` |"
        )
    lines.extend(["", "## Post-Funding Deploy Blocks", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- 3x cUSDT to bridge: `{entry['cUSDT_to_bridge_3x']}`")
        lines.append(f"- 3x cUSDC to bridge: `{entry['cUSDC_to_bridge_3x']}`")
        lines.append(f"- CW_BRIDGE env: `{entry['cw_bridge_env']}`")
        lines.append(f"- Recommended action: `{entry['recommended_action']}`")
        lines.append("")
        lines.append("```bash")
        lines.append(entry["post_funding_deploy_block"])
        lines.append("```")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,149 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from decimal import Decimal
from pathlib import Path
ROOT = Path(__file__).resolve().parents[2]
FUNDING_ACTIONS = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-actions-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-bridge-from-138-matrix-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_BRIDGE_FROM_138_MATRIX.md"
CHAIN_SUFFIX = {
1: "MAINNET",
10: "OPTIMISM",
25: "CRONOS",
56: "BSC",
100: "GNOSIS",
137: "POLYGON",
8453: "BASE",
42161: "ARBITRUM",
42220: "CELO",
43114: "AVALANCHE",
}
def now() -> str:
    """Current UTC time as an ISO-8601 'Z' timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load(path: Path):
    """Parse and return the JSON document stored at *path*."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str) -> None:
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def needed_amounts(entry: dict) -> tuple[str, str]:
    """Map a funding action_type to the (cUSDT, cUSDC) still needed from Chain 138."""
    action = entry["action_type"]
    if action == "seed_now":
        return "0", "0"
    if action == "mint_missing_side_then_seed":
        return "0", "0.8"
    # Any other action: assume both sides must be funded in full.
    return "1000", "1000"
def recommended_action(entry: dict) -> str:
    """Translate a funding action_type into the matrix's recommended action label."""
    labels = {
        "seed_now": "seed_now",
        "mint_missing_side_then_seed": "bridge_or_mint_missing_side_then_seed",
    }
    return labels.get(entry["action_type"], "bridge_or_mint_destination_then_seed")
def main() -> None:
    """Derive per-chain cUSDT/cUSDC funding needs from the funding-actions
    report, total them, and emit the bridge-from-138 JSON matrix plus
    markdown doc.
    """
    actions = load(FUNDING_ACTIONS)
    entries = []
    total_cusdt = Decimal("0")
    total_cusdc = Decimal("0")
    for entry in actions["entries"]:
        chain_id = entry["chain_id"]
        need_usdt, need_usdc = needed_amounts(entry)
        total_cusdt += Decimal(need_usdt)
        total_cusdc += Decimal(need_usdc)
        # CW bridge env var name is derived from the per-chain suffix table.
        suffix = CHAIN_SUFFIX[chain_id]
        bridge_env = f"CW_BRIDGE_{suffix}"
        bridge_config_present = bool(entry.get("bridge_possible"))
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "phase_1_pair": entry["phase_1_pair"],
                "cUSDT_needed_from_138": need_usdt,
                "cUSDC_needed_from_138": need_usdc,
                "bridge_config_env": bridge_env,
                "bridge_config_present": bridge_config_present,
                "recommended_action": recommended_action(entry),
                "minimum_gas_issue": entry["minimum_gas_issue"],
                "tokens_missing": entry["tokens_missing"],
                "bridge_note": entry["bridge_note"],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 bridge-from-138 matrix",
        "purpose": "Strict destination funding matrix from Chain 138 GRU v2 feeder inventory for phase-1 cWUSDT/cWUSDC rollout.",
        "signer": actions["signer"],
        "totals_needed_from_138": {
            "cUSDT": format(total_cusdt, "f"),
            "cUSDC": format(total_cusdc, "f"),
        },
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-funding-actions-latest.json",
            "docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md",
            "docs/07-ccip/CW_BRIDGE_APPROACH.md",
        ],
    }
    write_json(REPORT, payload)
    # Companion markdown: summary table then per-chain notes.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Bridge-From-138 Matrix",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Signer: `{payload['signer']}`",
        "- Purpose: strict destination funding matrix from Chain 138 GRU v2 feeder inventory for the remaining phase-1 pool rollout.",
        f"- Total cUSDT needed from Chain 138: `{payload['totals_needed_from_138']['cUSDT']}`",
        f"- Total cUSDC needed from Chain 138: `{payload['totals_needed_from_138']['cUSDC']}`",
        "",
        "| Chain | Network | cUSDT Needed | cUSDC Needed | Bridge Env | Bridge Present | Recommended Action |",
        "|---|---|---:|---:|---|---|---|",
    ]
    for entry in entries:
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | "
            f"`{entry['cUSDT_needed_from_138']}` | "
            f"`{entry['cUSDC_needed_from_138']}` | "
            f"`{entry['bridge_config_env']}` | "
            f"`{str(entry['bridge_config_present']).lower()}` | "
            f"`{entry['recommended_action']}` |"
        )
    lines.extend(["", "## Notes", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Tokens missing on destination: {', '.join(f'`{x}`' for x in entry['tokens_missing']) or '`none`'}")
        lines.append(f"- Minimum gas issue: `{str(entry['minimum_gas_issue']).lower()}`")
        lines.append(f"- Bridge config env: `{entry['bridge_config_env']}`")
        lines.append(f"- Bridge present: `{str(entry['bridge_config_present']).lower()}`")
        lines.append(f"- Recommended action: `{entry['recommended_action']}`")
        lines.append(f"- Bridge note: {entry['bridge_note']}")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,257 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from pathlib import Path
ROOT = Path(__file__).resolve().parents[2]
OPERATOR_PACKET = (
ROOT
/ "reports"
/ "extraction"
/ "promod-uniswap-v2-phase1-bridge-3x-operator-packet-latest.json"
)
FUNDING_ACTIONS = (
ROOT
/ "reports"
/ "extraction"
/ "promod-uniswap-v2-phase1-funding-actions-latest.json"
)
REPORT = (
ROOT
/ "reports"
/ "extraction"
/ "promod-uniswap-v2-phase1-bridge-or-mint-runbook-latest.json"
)
DOC = (
ROOT
/ "docs"
/ "03-deployment"
/ "PROMOD_UNISWAP_V2_PHASE1_BRIDGE_OR_MINT_RUNBOOK.md"
)
SOURCE_CUSDT = "0x93E66202A11B1772E55407B32B44e5Cd8eda7f22"
SOURCE_CUSDC = "0xf22258f57794CC8E06237084b353Ab30fFfa640b"
RPC_ENV = {
1: "ETHEREUM_MAINNET_RPC",
10: "OPTIMISM_MAINNET_RPC",
25: "CRONOS_RPC_URL",
56: "BSC_RPC_URL",
100: "GNOSIS_MAINNET_RPC",
137: "POLYGON_MAINNET_RPC",
8453: "BASE_MAINNET_RPC",
42161: "ARBITRUM_MAINNET_RPC",
42220: "CELO_MAINNET_RPC",
43114: "AVALANCHE_RPC_URL",
}
SELECTOR_ENV = {
1: "ETH_MAINNET_SELECTOR",
10: "OPTIMISM_SELECTOR",
25: "CRONOS_SELECTOR",
56: "BSC_SELECTOR",
100: "GNOSIS_SELECTOR",
137: "POLYGON_SELECTOR",
8453: "BASE_SELECTOR",
42161: "ARBITRUM_SELECTOR",
42220: "",
43114: "AVALANCHE_SELECTOR",
}
def now() -> str:
    """Current UTC time as an ISO-8601 'Z' timestamp."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
def load(path: Path):
    """Parse and return the JSON document stored at *path*."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write *payload* as 2-space-indented JSON to *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
def write_text(path: Path, text: str) -> None:
    """Write *text* to *path* with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def human_to_raw(amount: str) -> str:
    """Convert a human-readable non-negative 6-decimal token amount to raw units.

    "0.8" -> "800000", "1000" -> "1000000000". Fractions beyond 6 decimals
    are truncated. Fix over the previous revision: inputs with an empty whole
    part such as ".5" no longer raise ValueError (int("") crashed).
    """
    whole, _, frac = amount.partition(".")
    frac = (frac + "000000")[:6]
    whole_units = int(whole) if whole else 0  # tolerate ".5"-style inputs
    return str(whole_units * 1_000_000 + int(frac))
def bridge_preflight_block(entry: dict) -> str:
    """Return a bash preflight block for bridging 3x cUSDT/cUSDC from Chain 138.

    Exports source/destination RPC URLs, the destination CW bridge and token
    addresses, and both human and raw bridge amounts; then queries the
    destination bridge's feeToken() and — only when a selector env var is
    documented for the chain — calculateFee(). Actual bridge execution is
    intentionally deferred to the cW bridge runbooks referenced in the final
    echo.
    """
    chain_id = entry["chain_id"]
    rpc_var = RPC_ENV[chain_id]
    selector_env = SELECTOR_ENV.get(chain_id, "")
    lines = [
        "source smom-dbis-138/scripts/load-env.sh >/dev/null",
        'export SOURCE_RPC_URL="${CHAIN138_RPC}"',
        f'export DEST_RPC_URL="${{{rpc_var}}}"',
        f'export DEST_CW_BRIDGE="${{{entry["cw_bridge_env"]}}}"',
        f'export DEST_CWUSDT="{entry["destination_cw_addresses"]["cWUSDT"]}"',
        f'export DEST_CWUSDC="{entry["destination_cw_addresses"]["cWUSDC"]}"',
        f'export SOURCE_CUSDT="{SOURCE_CUSDT}"',
        f'export SOURCE_CUSDC="{SOURCE_CUSDC}"',
        f'export BRIDGE_CUSDT_HUMAN="{entry["cUSDT_to_bridge_3x"]}"',
        f'export BRIDGE_CUSDC_HUMAN="{entry["cUSDC_to_bridge_3x"]}"',
        f'export BRIDGE_CUSDT_RAW="{human_to_raw(entry["cUSDT_to_bridge_3x"])}"',
        f'export BRIDGE_CUSDC_RAW="{human_to_raw(entry["cUSDC_to_bridge_3x"])}"',
    ]
    # Chains without a documented selector (e.g. Celo) get a marker comment
    # instead of a DEST_SELECTOR export.
    if selector_env:
        lines.append(f'export DEST_SELECTOR="${{{selector_env}}}"')
    else:
        lines.append("# DEST_SELECTOR env is not currently documented for this chain in smom-dbis-138/.env")
    lines.extend(
        [
            "",
            'cast call "$DEST_CW_BRIDGE" \'feeToken()(address)\' --rpc-url "$DEST_RPC_URL"',
        ]
    )
    if selector_env:
        lines.append(
            'cast call "$DEST_CW_BRIDGE" \'calculateFee(uint64,uint256)(uint256)\' "$DEST_SELECTOR" "$BRIDGE_CUSDC_RAW" --rpc-url "$DEST_RPC_URL"'
        )
    lines.extend(
        [
            "",
            'echo "Bridge execution stays runbook-driven: follow docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md and docs/07-ccip/CW_BRIDGE_APPROACH.md with the env exported above."',
        ]
    )
    return "\n".join(lines)
def main() -> None:
    """Merge the 3x operator packet with the funding-actions report into a
    per-chain bridge-or-mint runbook (preferred path, preflight block, mint
    fallback, deploy block) and emit the JSON report plus markdown doc.
    """
    operator_packet = load(OPERATOR_PACKET)
    funding_actions = load(FUNDING_ACTIONS)
    actions_by_chain = {entry["chain_id"]: entry for entry in funding_actions["entries"]}
    entries = []
    for entry in operator_packet["entries"]:
        chain_id = entry["chain_id"]
        action = actions_by_chain[chain_id]
        selector_env = SELECTOR_ENV.get(chain_id, "")
        seed_now = action["action_type"] == "seed_now"
        # Path choice: already funded -> seed; selector documented -> bridge;
        # otherwise fall back to destination mint.
        preferred_path = (
            "seed_now"
            if seed_now
            else "bridge_then_seed" if selector_env else "mint_then_seed"
        )
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "phase_1_pair": entry["phase_1_pair"],
                "preferred_path": preferred_path,
                "recommended_action": entry["recommended_action"],
                "cUSDT_to_bridge_3x": entry["cUSDT_to_bridge_3x"],
                "cUSDC_to_bridge_3x": entry["cUSDC_to_bridge_3x"],
                "destination_cw_addresses": entry["destination_cw_addresses"],
                "cw_bridge_env": entry["cw_bridge_env"],
                "selector_env": selector_env or None,
                "bridge_preflight_block": "" if seed_now else bridge_preflight_block(entry),
                "mint_fallback_steps": action["mint_steps"],
                "post_funding_deploy_block": entry["post_funding_deploy_block"],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 bridge-or-mint execution runbook",
        "purpose": "Strict runbook keyed to the 3x operator packet: for each chain, prefer bridge when the destination bridge env and selector are present, otherwise use the exact destination mint fallback, then execute the post-funding pool deploy block.",
        "source_chain": {
            "chain_id": 138,
            "network": "Chain 138",
            "source_tokens": {
                "cUSDT": SOURCE_CUSDT,
                "cUSDC": SOURCE_CUSDC,
            },
        },
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-bridge-3x-operator-packet-latest.json",
            "reports/extraction/promod-uniswap-v2-phase1-funding-actions-latest.json",
            "docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md",
            "docs/07-ccip/CW_BRIDGE_APPROACH.md",
        ],
    }
    write_json(REPORT, payload)
    # Companion markdown: operating rules, summary table, per-chain runbooks.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Bridge-Or-Mint Execution Runbook",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: strict bridge-or-mint runbook matched to the `3x` operator packet.",
        "- Source chain: `138` / Chain 138",
        f"- Source cUSDT: `{SOURCE_CUSDT}`",
        f"- Source cUSDC: `{SOURCE_CUSDC}`",
        "",
        "## Operating Rule",
        "",
        "1. If the chain is already funded, seed the pool directly.",
        "2. If the chain has a documented selector env and `CW_BRIDGE_<CHAIN>`, prefer bridge preflight and then follow the cW bridge runbooks.",
        "3. If the bridge path is not fully parameterized in repo env, use the exact destination mint fallback block.",
        "4. After funding lands, execute the post-funding deploy block and then refresh discovery and promotion gates.",
        "",
        "| Chain | Network | Preferred Path | 3x cUSDT | 3x cUSDC | CW_BRIDGE Env | Selector Env |",
        "|---|---|---|---:|---:|---|---|",
    ]
    for entry in entries:
        selector_env = f"`{entry['selector_env']}`" if entry["selector_env"] else "`missing`"
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['preferred_path']}` | "
            f"`{entry['cUSDT_to_bridge_3x']}` | `{entry['cUSDC_to_bridge_3x']}` | "
            f"`{entry['cw_bridge_env']}` | {selector_env} |"
        )
    lines.extend(["", "## Per-Chain Runbook", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Preferred path: `{entry['preferred_path']}`")
        lines.append(f"- Recommended action: `{entry['recommended_action']}`")
        lines.append(f"- 3x cUSDT from 138: `{entry['cUSDT_to_bridge_3x']}`")
        lines.append(f"- 3x cUSDC from 138: `{entry['cUSDC_to_bridge_3x']}`")
        lines.append(f"- Destination cWUSDT: `{entry['destination_cw_addresses']['cWUSDT']}`")
        lines.append(f"- Destination cWUSDC: `{entry['destination_cw_addresses']['cWUSDC']}`")
        lines.append(f"- CW_BRIDGE env: `{entry['cw_bridge_env']}`")
        lines.append(
            f"- Selector env: `{entry['selector_env']}`"
            if entry["selector_env"]
            else "- Selector env: `missing`"
        )
        lines.append("")
        if entry["bridge_preflight_block"]:
            lines.append("Bridge preflight block:")
            lines.append("```bash")
            lines.append(entry["bridge_preflight_block"])
            lines.append("```")
            lines.append("")
        if entry["mint_fallback_steps"]:
            lines.append("Mint fallback blocks:")
            for step in entry["mint_fallback_steps"]:
                lines.append(f"- Mint `{step['token']}` `{step['amount_human']}`:")
                lines.append("```bash")
                lines.append(step["exact_mint_block"])
                lines.append("```")
            lines.append("")
        lines.append("Post-funding deploy block:")
        lines.append("```bash")
        lines.append(entry["post_funding_deploy_block"])
        lines.append("```")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
from __future__ import annotations
from pathlib import Path
import json
import time
# Repository root: this script lives two directory levels below it.
ROOT = Path(__file__).resolve().parents[2]
# Inputs: phase ordering, promotion-gate report, and live deployment status.
PHASE_ORDER = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase-order-latest.json"
PROMOTION_GATES = ROOT / "reports" / "extraction" / "promod-uniswap-v2-promotion-gates-latest.json"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
# Outputs: machine-readable checklist report and the operator-facing doc.
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-execution-checklist-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_EXECUTION_CHECKLIST.md"
# Candidate RPC endpoint env-var names per chain id, in preference order
# (main() suggests the first entry in its paste-ready cast commands).
RPC_ENV_KEYS = {
    1: ["ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
}
def now() -> str:
    """Current UTC time as an ISO-8601 `...Z` timestamp string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Parse and return the JSON document stored at `path`."""
    with path.open() as handle:
        return json.load(handle)


def write_json(path: Path, payload):
    """Write `payload` as pretty-printed JSON (trailing newline), creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")


def write_text(path: Path, text: str):
    """Write `text` normalized to exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def token_addresses(chain_status: dict, pair: str) -> dict[str, str]:
    """Resolve both sides of an `A/B` pair string to token addresses.

    `cwTokens` entries take precedence over `anchorAddresses`; a symbol
    found in neither maps to the empty string.
    """
    token_a, token_b = pair.split("/")
    cw_tokens = chain_status.get("cwTokens", {})
    anchors = chain_status.get("anchorAddresses", {})

    def lookup(symbol: str) -> str:
        return cw_tokens[symbol] if symbol in cw_tokens else anchors.get(symbol, "")

    return {symbol: lookup(symbol) for symbol in (token_a, token_b)}
def main():
    """Join phase order, promotion gates, and deployment status into the
    phase-1 execution checklist: a JSON report plus an operator markdown doc."""
    phase = load(PHASE_ORDER)
    gates = load(PROMOTION_GATES)
    status = load(DEPLOYMENT_STATUS)
    # Index gate entries by chain id so phase entries can be joined below.
    gates_by_chain = {entry["chain_id"]: entry for entry in gates["entries"]}
    entries = []
    for entry in phase["entries"]:
        chain_id = entry["chain_id"]
        # deployment-status keys chains by string id.
        chain_status = status["chains"].get(str(chain_id), {})
        gate = gates_by_chain.get(chain_id, {})
        pair = entry["phase_1_core_rail"]
        tokens = token_addresses(chain_status, pair)
        token_a, token_b = pair.split("/")
        env_vars = [item["name"] for item in gate.get("exact_env_vars_to_fill", [])]
        rpc_keys = RPC_ENV_KEYS.get(chain_id, [])
        # Factory/router env vars are recognized by name suffix; "" if absent.
        factory_var = next((name for name in env_vars if name.endswith("_FACTORY")), "")
        router_var = next((name for name in env_vars if name.endswith("_ROUTER")), "")
        # Ordered, paste-ready operator steps; cast commands embed the resolved
        # env-var names and token addresses (first RPC key is preferred).
        checklist = [
            f"Confirm `{factory_var}`, `{router_var}`, and `{next((name for name in env_vars if name.endswith('_START_BLOCK')), '')}` are set in `smom-dbis-138/.env`.",
            f"Confirm one RPC env is usable for chain `{chain_id}`: {', '.join(f'`{key}`' for key in rpc_keys)}.",
            f"Verify token addresses for `{token_a}` and `{token_b}` in `cross-chain-pmm-lps/config/deployment-status.json` before sending any transaction.",
            f"Probe the factory first: `cast call \"${{{factory_var}}}\" \"getPair(address,address)(address)\" {tokens.get(token_a,'0x...')} {tokens.get(token_b,'0x...')} --rpc-url \"${{{rpc_keys[0] if rpc_keys else 'RPC_URL'}}}\"`.",
            f"If the pair is absent, create it: `cast send \"${{{factory_var}}}\" \"createPair(address,address)(address)\" {tokens.get(token_a,'0x...')} {tokens.get(token_b,'0x...')} --rpc-url \"${{{rpc_keys[0] if rpc_keys else 'RPC_URL'}}}\" --private-key \"$PRIVATE_KEY\"`.",
            f"Approve both phase-1 tokens to the router and seed initial liquidity on `{pair}` with `addLiquidity(...)` through `\"${{{router_var}}}\"`.",
            "Rebuild live pair discovery and write the discovered pair into `deployment-status.json`.",
            "Re-run promotion gates and do not move to the next chain until the current one is discoverable and recorded.",
        ]
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "phase_1_core_rail": pair,
                "rpc_env_keys": rpc_keys,
                "required_uniswap_v2_env_vars": env_vars,
                "token_addresses": tokens,
                "remaining_live_blockers": entry.get("remaining_live_blockers", []),
                "execution_checklist": checklist,
                "post_phase_1_commands": entry.get("post_phase_1_commands", []),
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": phase["program_name"],
        "purpose": "Exact chain-by-chain operator checklist for the first full phase 1 rollout of Mr. Promod's Uniswap V2 core rail.",
        "mainnet_funding_posture": phase["mainnet_funding_posture"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase-order-latest.json",
            "reports/extraction/promod-uniswap-v2-promotion-gates-latest.json",
            "cross-chain-pmm-lps/config/deployment-status.json",
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: header, summary table, then per-chain step lists.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Execution Checklist",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: {payload['program_name']}",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        "- Purpose: exact chain-by-chain checklist for the first full phase 1 rollout of `cWUSDT/cWUSDC`.",
        "",
        "## Global Rule",
        "",
        "- Do not advance to the next chain until the current chain's `cWUSDT/cWUSDC` pair is discoverable on-chain and recordable in `deployment-status.json`.",
        "",
        "| Chain | Network | Phase 1 Pair | Required Env | RPC Keys | Required Token Addresses |",
        "|---|---|---|---|---|---|",
    ]
    for entry in entries:
        envs = ", ".join(f"`{item}`" for item in entry["required_uniswap_v2_env_vars"])
        rpcs = ", ".join(f"`{item}`" for item in entry["rpc_env_keys"])
        tokens = ", ".join(f"`{k}={v}`" for k, v in entry["token_addresses"].items())
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['phase_1_core_rail']}` | {envs} | {rpcs} | {tokens} |"
        )
    lines.extend(
        [
            "",
            "## Execution Steps",
            "",
        ]
    )
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        # Numbered checklist items per chain.
        for idx, item in enumerate(entry["execution_checklist"], 1):
            lines.append(f"{idx}. {item}")
        lines.append("")
        lines.append("Post-phase-1 commands:")
        for cmd in entry["post_phase_1_commands"]:
            lines.append(f"- `{cmd}`")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    # Print the report path so callers/logs can locate the artifact.
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,202 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from decimal import Decimal
from pathlib import Path
# Repository root (two levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Input funding bundle; outputs: JSON actions report and markdown doc.
FUNDING = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-bundle-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-actions-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_FUNDING_ACTIONS.md"
# Chain id -> CW_BRIDGE_* env-var suffix used in bridge notes.
CHAIN_SUFFIX = {
    1: "MAINNET",
    10: "OPTIMISM",
    25: "CRONOS",
    56: "BSC",
    100: "GNOSIS",
    137: "POLYGON",
    8453: "BASE",
    42161: "ARBITRUM",
    42220: "CELO",
    43114: "AVALANCHE",
}
# Chain id -> single RPC env-var name embedded in generated mint commands.
RPC_ENV = {
    1: "ETHEREUM_MAINNET_RPC",
    10: "OPTIMISM_MAINNET_RPC",
    25: "CRONOS_RPC_URL",
    56: "BSC_RPC_URL",
    100: "GNOSIS_MAINNET_RPC",
    137: "POLYGON_MAINNET_RPC",
    8453: "BASE_MAINNET_RPC",
    42161: "ARBITRUM_MAINNET_RPC",
    42220: "CELO_MAINNET_RPC",
    43114: "AVALANCHE_RPC_URL",
}
def now() -> str:
    """UTC timestamp in ISO-8601 `...Z` form."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Read and decode the JSON file at `path`."""
    raw = path.read_text()
    return json.loads(raw)


def write_json(path: Path, payload) -> None:
    """Dump `payload` as indented JSON with a trailing newline; makes parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    body = json.dumps(payload, indent=2) + "\n"
    path.write_text(body)


def write_text(path: Path, text: str) -> None:
    """Persist `text` with a single trailing newline; makes parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(f"{text.rstrip()}\n")
def mint_block(chain_id: int, token_symbol: str, token_address: str, amount_raw: int) -> str:
    """Paste-ready shell block that mints `amount_raw` of a destination token.

    The block sources the project env, then `cast send`s `mint(address,uint256)`
    to the signer derived from PRIVATE_KEY over the chain's RPC env var.
    `token_symbol` is accepted for call-site readability only.
    """
    rpc_env_name = RPC_ENV[chain_id]
    return "\n".join(
        [
            "source smom-dbis-138/scripts/load-env.sh >/dev/null",
            f'cast send "{token_address}" \'mint(address,uint256)\' "$(cast wallet address --private-key "$PRIVATE_KEY")" "{amount_raw}" \\',
            f' --rpc-url "${{{rpc_env_name}}}" --private-key "$PRIVATE_KEY" --legacy --gas-limit 100000',
        ]
    )
def gas_topup_note(chain_id: int) -> str:
    """Operator note about native-gas top-ups for the given chain.

    Chains 10 (Optimism) and 8453 (Base) carry explicit top-up warnings from
    the latest preflight snapshot; every other chain gets the all-clear text.
    """
    special_notes = {
        10: "Top up native gas on Optimism before minting or seeding; current balance is below the 0.001 safety threshold.",
        8453: "Top up native gas on Base before minting or seeding; current balance is below the 0.001 safety threshold.",
    }
    default_note = "No minimum gas top-up issue from the latest preflight snapshot."
    return special_notes.get(chain_id, default_note)
def bridge_note(chain_id: int) -> str:
    """Standard bridge-availability note for a chain, with its CW_BRIDGE suffix."""
    env_suffix = CHAIN_SUFFIX[chain_id]
    lead = (
        f"Bridge path is structurally available for chain `{chain_id}` via `CW_BRIDGE_{env_suffix}` and `bridgeAvailable=true`, "
    )
    tail = (
        "but the repo-native executable path today is destination-side cW minting. Cross-chain c* -> cW delivery still follows "
        "`docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md` and `docs/07-ccip/CW_BRIDGE_APPROACH.md` rather than a single helper script."
    )
    return lead + tail
def main() -> None:
    """Derive per-chain funding actions from the funding bundle and write the
    JSON report plus operator markdown doc."""
    bundle = load(FUNDING)
    entries = []
    for entry in bundle["entries"]:
        chain_id = entry["chain_id"]
        tokens_missing = entry["tokens_missing"]
        max_seed = Decimal(entry["max_equal_seed_human"])
        if entry["funding_ready_now"]:
            # Both sides already funded: seed immediately at the max equal amount.
            action = "seed_now"
            recommended_seed_human = str(max_seed)
            mint_steps = []
        else:
            # Special case: Avalanche missing only cWUSDC gets a small 0.8 mint
            # (raw 800000 assumes 6 decimals — TODO confirm token decimals).
            if chain_id == 43114 and tokens_missing == ["cWUSDC"]:
                action = "mint_missing_side_then_seed"
                recommended_seed_human = "0.8"
                mint_steps = [
                    {
                        "token": "cWUSDC",
                        "amount_human": "0.8",
                        "amount_raw": 800000,
                        "exact_mint_block": mint_block(chain_id, "cWUSDC", entry["token_addresses"]["cWUSDC"], 800000),
                    }
                ]
            else:
                # General case: mint 1000 (raw 1000000000) of every missing token.
                action = "mint_destination_then_seed"
                recommended_seed_human = "1000"
                mint_steps = []
                for token in tokens_missing:
                    mint_steps.append(
                        {
                            "token": token,
                            "amount_human": "1000",
                            "amount_raw": 1000000000,
                            "exact_mint_block": mint_block(chain_id, token, entry["token_addresses"][token], 1000000000),
                        }
                    )
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "phase_1_pair": entry["phase_1_pair"],
                "action_type": action,
                "tokens_missing": tokens_missing,
                "minimum_gas_issue": entry["minimum_gas_issue"],
                "gas_topup_note": gas_topup_note(chain_id),
                "recommended_seed_human": recommended_seed_human,
                "bridge_possible": True,
                "bridge_note": bridge_note(chain_id),
                "mint_steps": mint_steps,
                "exact_post_funding_deploy_block": entry["exact_post_funding_deploy_block"],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 funding actions",
        "purpose": "Strict phase-1 funding actions: seed-now, mint-then-seed, and bridge-possible notes for each target chain.",
        "signer": bundle["signer"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-funding-bundle-latest.json",
            "docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md",
            "docs/07-ccip/CW_BRIDGE_APPROACH.md",
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: summary table then per-chain action sections.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Funding Actions",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Signer: `{payload['signer']}`",
        "- Purpose: strict per-chain action plan for phase-1 funding and deployment.",
        "",
        "| Chain | Network | Action | Tokens Missing | Gas Issue | Recommended Seed |",
        "|---|---|---|---|---|---:|",
    ]
    for entry in entries:
        missing = ", ".join(f"`{t}`" for t in entry["tokens_missing"]) or "`none`"
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['action_type']}` | {missing} | "
            f"`{str(entry['minimum_gas_issue']).lower()}` | `{entry['recommended_seed_human']}` |"
        )
    lines.extend(["", "## Per-Chain Actions", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Action: `{entry['action_type']}`")
        lines.append(f"- Tokens missing: {', '.join(f'`{t}`' for t in entry['tokens_missing']) or '`none`'}")
        lines.append(f"- Gas issue: `{str(entry['minimum_gas_issue']).lower()}`")
        lines.append(f"- Gas note: {entry['gas_topup_note']}")
        lines.append(f"- Bridge possible: `{str(entry['bridge_possible']).lower()}`")
        lines.append(f"- Bridge note: {entry['bridge_note']}")
        lines.append("")
        if entry["mint_steps"]:
            lines.append("Mint steps:")
            for step in entry["mint_steps"]:
                lines.append(f"- Mint `{step['token']}` `{step['amount_human']}` with:")
                lines.append("```bash")
                lines.append(step["exact_mint_block"])
                lines.append("```")
        if entry["exact_post_funding_deploy_block"]:
            lines.append("Post-funding deploy block:")
            lines.append("```bash")
            lines.append(entry["exact_post_funding_deploy_block"])
            lines.append("```")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,175 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from decimal import Decimal
from pathlib import Path
# Repository root (two levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Inputs: funding readiness snapshot and the execution checklist report.
FUNDING = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-readiness-latest.json"
CHECKLIST = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-execution-checklist-latest.json"
# Outputs: funding-bundle JSON report and operator markdown doc.
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-bundle-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_FUNDING_BUNDLE.md"
def now() -> str:
    """ISO-8601 `...Z` timestamp for the current UTC time."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Return the parsed JSON content of `path`."""
    return json.loads(path.read_text())


def write_json(path: Path, payload) -> None:
    """Serialize `payload` (indent=2, trailing newline) to `path`; mkdir -p first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, indent=2)
    path.write_text(serialized + "\n")


def write_text(path: Path, text: str) -> None:
    """Write `text` ending in exactly one newline to `path`; mkdir -p first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    normalized = text.rstrip() + "\n"
    path.write_text(normalized)
def env_ref(name: str) -> str:
    """Render a shell `${NAME}` reference for the given env-var name."""
    return f"${{{name}}}"
def deploy_block(entry: dict, funding_entry: dict) -> str:
    """Build the exact post-funding shell block for one chain.

    `entry` comes from the execution checklist (env-var names, RPC keys, token
    addresses); `funding_entry` from the readiness snapshot (raw balances).
    The block: sources env, exports addresses, creates the pair if missing,
    approves both tokens, adds equal-sided liquidity, then re-runs discovery,
    promotion gates, and status validation.
    """
    token_addresses = entry["token_addresses"]
    cwusdt = token_addresses["cWUSDT"]
    cwusdc = token_addresses["cWUSDC"]
    envs = entry["required_uniswap_v2_env_vars"]
    # Factory/router env vars identified by suffix; raises StopIteration if absent.
    factory_var = next(name for name in envs if name.endswith("_FACTORY"))
    router_var = next(name for name in envs if name.endswith("_ROUTER"))
    rpc_var = entry["rpc_env_keys"][0]
    # Equal-sided seed: the smaller of the two raw balances.
    amount_raw = min(
        int(funding_entry["cwusdt"]["balance_raw"]),
        int(funding_entry["cwusdc"]["balance_raw"]),
    )
    lines = [
        "source smom-dbis-138/scripts/load-env.sh >/dev/null",
        f'export RPC_URL="{env_ref(rpc_var)}"',
        f'export FACTORY="{env_ref(factory_var)}"',
        f'export ROUTER="{env_ref(router_var)}"',
        f'export CWUSDT="{cwusdt}"',
        f'export CWUSDC="{cwusdc}"',
        'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
        f'export AMOUNT_RAW="{amount_raw}"',
        'export DEADLINE="$(( $(date +%s) + 3600 ))"',
        "",
        # Create the pair only if the factory reports the zero address.
        'PAIR="$(cast call "$FACTORY" \'getPair(address,address)(address)\' "$CWUSDT" "$CWUSDC" --rpc-url "$RPC_URL")"',
        'if [[ "$PAIR" == "0x0000000000000000000000000000000000000000" ]]; then',
        '  cast send "$FACTORY" \'createPair(address,address)(address)\' "$CWUSDT" "$CWUSDC" \\',
        '    --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "fi",
        "",
        'cast send "$CWUSDT" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_RAW" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        'cast send "$CWUSDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_RAW" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        # Equal amounts on both sides; min amounts set to the same value (no slippage allowance).
        'cast send "$ROUTER" \\',
        "  'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)' \\",
        '  "$CWUSDT" "$CWUSDC" "$AMOUNT_RAW" "$AMOUNT_RAW" "$AMOUNT_RAW" "$AMOUNT_RAW" "$SIGNER" "$DEADLINE" \\',
        '  --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        "",
        # Post-seed bookkeeping: discovery, gates, status validation.
        "python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py --write-discovered",
        "bash scripts/verify/build-promod-uniswap-v2-promotion-gates.sh",
        "node cross-chain-pmm-lps/scripts/validate-deployment-status.cjs cross-chain-pmm-lps/config/deployment-status.json",
    ]
    return "\n".join(lines)
def main() -> None:
    """Join funding readiness with the execution checklist into the per-chain
    funding bundle (JSON report + markdown doc with paste-ready deploy blocks)."""
    funding = load(FUNDING)
    checklist = load(CHECKLIST)
    funding_by_chain = {entry["chain_id"]: entry for entry in funding["entries"]}
    checklist_by_chain = {entry["chain_id"]: entry for entry in checklist["entries"]}
    entries = []
    # Fixed phase-1 chain order; KeyError here means a source report is stale.
    for chain_id in [1, 10, 25, 56, 100, 137, 8453, 42161, 42220, 43114]:
        f = funding_by_chain[chain_id]
        c = checklist_by_chain[chain_id]
        missing_tokens = []
        if Decimal(f["cwusdt"]["balance_human"]) <= 0:
            missing_tokens.append("cWUSDT")
        if Decimal(f["cwusdc"]["balance_human"]) <= 0:
            missing_tokens.append("cWUSDC")
        gas_issue = not f["native_balance_ok_for_ops"]
        # Only chains that are funding-ready get an executable deploy block now.
        exact_block = deploy_block(c, f) if f["funding_ready_now"] else None
        entries.append(
            {
                "chain_id": chain_id,
                "network": f["network"],
                "phase_1_pair": f["phase_1_pair"],
                "tokens_missing": missing_tokens,
                "minimum_gas_issue": gas_issue,
                "native_balance": f["native_balance"],
                "max_equal_seed_human": f["max_equal_seed_human"],
                "funding_ready_now": f["funding_ready_now"],
                "exact_post_funding_deploy_block": exact_block,
                "rpc_env_key": f["rpc_env_key"],
                "router": f["router"],
                "factory": f["factory"],
                "token_addresses": c["token_addresses"],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 funding bundle",
        "purpose": "Strict per-chain funding bundle: missing tokens, gas issue, and exact deploy block to run once each chain is funded.",
        "signer": funding["signer"],
        "phase_1_pair": funding["phase_1_pair"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-funding-readiness-latest.json",
            "reports/extraction/promod-uniswap-v2-phase1-execution-checklist-latest.json",
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: summary table then per-chain deploy blocks.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Funding Bundle",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Signer: `{payload['signer']}`",
        "- Purpose: strict per-chain funding bundle for `cWUSDT/cWUSDC` phase-1 rollout.",
        "",
        "| Chain | Network | Tokens Missing | Minimum Gas Issue | Max Equal Seed | Funding Ready Now |",
        "|---|---|---|---|---:|---|",
    ]
    for entry in entries:
        missing = ", ".join(f"`{x}`" for x in entry["tokens_missing"]) or "`none`"
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | {missing} | "
            f"`{str(entry['minimum_gas_issue']).lower()}` | "
            f"`{entry['max_equal_seed_human']}` | "
            f"`{str(entry['funding_ready_now']).lower()}` |"
        )
    lines.append("")
    lines.append("## Exact Post-Funding Deploy Blocks")
    lines.append("")
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Tokens missing: {', '.join(f'`{x}`' for x in entry['tokens_missing']) or '`none`'}")
        lines.append(f"- Minimum gas issue: `{str(entry['minimum_gas_issue']).lower()}`")
        lines.append(f"- Max equal seed after funding snapshot: `{entry['max_equal_seed_human']}`")
        lines.append("")
        if entry["exact_post_funding_deploy_block"]:
            lines.append("```bash")
            lines.append(entry["exact_post_funding_deploy_block"])
            lines.append("```")
        else:
            lines.append("Post-funding deploy block becomes executable once the missing token and gas blockers are cleared. Use the chain-specific execution checklist plus this chain's env and token addresses.")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,314 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import subprocess
import time
from decimal import Decimal, getcontext
from pathlib import Path
# High precision so raw-to-human token conversions don't lose digits.
getcontext().prec = 50
# Repository root (two levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Input: live deployment status. Outputs: readiness JSON report + markdown doc.
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-funding-readiness-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_FUNDING_READINESS.md"
# Phase-1 target chains, probed in this order.
TARGET_CHAINS = [1, 10, 25, 56, 100, 137, 8453, 42161, 42220, 43114]
# Chain id -> env-var name holding that chain's RPC endpoint URL.
RPC_ENV_KEYS = {
    1: "ETHEREUM_MAINNET_RPC",
    10: "OPTIMISM_MAINNET_RPC",
    25: "CRONOS_RPC_URL",
    56: "BSC_RPC_URL",
    100: "GNOSIS_MAINNET_RPC",
    137: "POLYGON_MAINNET_RPC",
    8453: "BASE_MAINNET_RPC",
    42161: "ARBITRUM_MAINNET_RPC",
    42220: "CELO_MAINNET_RPC",
    43114: "AVALANCHE_RPC_URL",
}
# Chain id -> human-readable network name used in reports.
NETWORK_NAMES = {
    1: "Ethereum Mainnet",
    10: "Optimism",
    25: "Cronos",
    56: "BSC",
    100: "Gnosis",
    137: "Polygon",
    8453: "Base",
    42161: "Arbitrum One",
    42220: "Celo",
    43114: "Avalanche",
}
def now() -> str:
    """Current UTC time rendered as an ISO-8601 `...Z` timestamp."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def write_json(path: Path, payload: dict) -> None:
    """Write `payload` as indented JSON plus trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("%s\n" % json.dumps(payload, indent=2))


def write_text(path: Path, text: str) -> None:
    """Write `text` normalized to one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    stripped = text.rstrip()
    path.write_text(stripped + "\n")
def run(cmd: list[str], timeout: float = 8) -> tuple[int, str, str]:
    """Run `cmd`, capturing text output.

    Args:
        cmd: argv-style command list (no shell interpretation).
        timeout: seconds before the process is killed.

    Returns:
        (returncode, stdout.strip(), stderr.strip()).

    A timed-out command is reported as exit code 124 (the `timeout(1)`
    convention) instead of raising subprocess.TimeoutExpired, so a single
    slow RPC probe degrades to its caller's fallback value (e.g.
    token_decimals -> 18) rather than aborting the whole readiness report.
    """
    try:
        proc = subprocess.run(cmd, text=True, capture_output=True, timeout=timeout)
    except subprocess.TimeoutExpired as exc:
        partial = exc.stdout or ""
        if isinstance(partial, bytes):  # TimeoutExpired may carry bytes
            partial = partial.decode(errors="replace")
        return 124, partial.strip(), f"timeout after {timeout}s"
    return proc.returncode, proc.stdout.strip(), proc.stderr.strip()
def load_env() -> dict[str, str]:
    """Source the project env in a bash subshell and return selected vars.

    Runs `load-env.sh` then an embedded python3 heredoc that dumps the
    whitelisted keys (private key, per-chain RPC URLs, and Uniswap V2
    factory/router vars) as a JSON object on stdout. Missing keys come back
    as empty strings. Raises RuntimeError if the subshell fails.
    """
    script = (
        "source smom-dbis-138/scripts/load-env.sh >/dev/null && "
        "python3 - <<'PY'\n"
        "import json, os\n"
        "keys = [\n"
        " 'PRIVATE_KEY',\n"
        " 'ETHEREUM_MAINNET_RPC','OPTIMISM_MAINNET_RPC','CRONOS_RPC_URL','BSC_RPC_URL',\n"
        " 'GNOSIS_MAINNET_RPC','POLYGON_MAINNET_RPC','BASE_MAINNET_RPC','ARBITRUM_MAINNET_RPC',\n"
        " 'CELO_MAINNET_RPC','AVALANCHE_RPC_URL',\n"
        " 'CHAIN_1_UNISWAP_V2_FACTORY','CHAIN_1_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_10_UNISWAP_V2_FACTORY','CHAIN_10_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_25_UNISWAP_V2_FACTORY','CHAIN_25_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_56_UNISWAP_V2_FACTORY','CHAIN_56_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_100_UNISWAP_V2_FACTORY','CHAIN_100_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_137_UNISWAP_V2_FACTORY','CHAIN_137_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_8453_UNISWAP_V2_FACTORY','CHAIN_8453_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_42161_UNISWAP_V2_FACTORY','CHAIN_42161_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_42220_UNISWAP_V2_FACTORY','CHAIN_42220_UNISWAP_V2_ROUTER',\n"
        " 'CHAIN_43114_UNISWAP_V2_FACTORY','CHAIN_43114_UNISWAP_V2_ROUTER'\n"
        "]\n"
        "print(json.dumps({k: os.environ.get(k, '') for k in keys}))\n"
        "PY"
    )
    # `bash -lc` so login-shell init (PATH etc.) applies before sourcing.
    rc, out, err = run(["bash", "-lc", script], timeout=10)
    if rc != 0:
        raise RuntimeError(f"failed to load env: {err or out}")
    return json.loads(out)
def signer_address(private_key: str) -> str:
    """Derive the signer address from `private_key` via `cast wallet address`."""
    rc, out, err = run(["cast", "wallet", "address", "--private-key", private_key], timeout=10)
    if rc == 0:
        return out
    raise RuntimeError(f"failed to derive signer: {err or out}")
def chain_entry(status: dict, chain_id: int) -> dict:
    """Per-chain section of deployment-status; tolerates str or int keys."""
    chains = status["chains"]
    for key in (str(chain_id), chain_id):
        found = chains.get(key)
        if found:
            return found
    return {}


def token_address(status: dict, chain_id: int, symbol: str) -> str:
    """cW token address for `symbol`; handles plain-string and object entries."""
    value = chain_entry(status, chain_id).get("cwTokens", {}).get(symbol)
    if isinstance(value, dict):
        # Object form: prefer the `address` field, then `token`.
        return value.get("address") or value.get("token") or ""
    return value or ""
def token_decimals(rpc_url: str, token: str) -> int:
    """ERC-20 `decimals()` via `cast call`; falls back to 18 on any failure."""
    rc, out, _ = run(["cast", "call", token, "decimals()(uint8)", "--rpc-url", rpc_url], timeout=5)
    if rc == 0 and out:
        return int(out)
    return 18


def token_value_human(raw: str, decimals: int) -> str:
    """Scale a raw amount string to a human decimal string.

    Only the first whitespace-separated token of `raw` is used, with base
    auto-detection (handles `0x...` hex output from cast).
    """
    quantity = Decimal(int(raw.split()[0], 0))
    scale = Decimal(10) ** decimals
    return format(quantity / scale, "f")


def min_human(a: str, b: str) -> str:
    """Smaller of two decimal strings, compared and rendered via Decimal."""
    return format(min(Decimal(a), Decimal(b)), "f")
def pair_reserves(rpc_url: str, pair: str) -> tuple[str, str]:
    """Raw (reserve0, reserve1) for a V2 pair; ("0", "0") when absent or unreadable."""
    empty = ("0", "0")
    # No pair deployed: skip the RPC round-trip entirely.
    if pair in ("", "0x0000000000000000000000000000000000000000"):
        return empty
    rc, out, _ = run(["cast", "call", pair, "getReserves()(uint112,uint112,uint32)", "--rpc-url", rpc_url], timeout=5)
    if rc != 0 or not out:
        return empty
    fields = out.split()
    if len(fields) < 2:
        return empty
    return fields[0], fields[1]
def main() -> None:
    """Probe each phase-1 chain's balances/pair state via cast and write the
    funding-readiness JSON report and markdown doc."""
    env = load_env()
    status = json.loads(DEPLOYMENT_STATUS.read_text())
    signer = signer_address(env["PRIVATE_KEY"])
    entries: list[dict] = []
    # Chain-id buckets by readiness classification.
    completed = []
    ready_now = []
    needs_funding = []
    for chain_id in TARGET_CHAINS:
        rpc_key = RPC_ENV_KEYS[chain_id]
        rpc_url = env.get(rpc_key, "")
        factory = env.get(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", "")
        router = env.get(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", "")
        cwusdt = token_address(status, chain_id, "cWUSDT")
        cwusdc = token_address(status, chain_id, "cWUSDC")
        # On-chain probes: pair lookup, native balance, token decimals,
        # token balances, and router allowances. All failures degrade to "0".
        pair_rc, pair_out, _ = run(
            ["cast", "call", factory, "getPair(address,address)(address)", cwusdt, cwusdc, "--rpc-url", rpc_url],
            timeout=5,
        )
        gas_rc, gas_out, _ = run(["cast", "balance", signer, "--ether", "--rpc-url", rpc_url], timeout=5)
        cwusdt_decimals = token_decimals(rpc_url, cwusdt)
        cwusdc_decimals = token_decimals(rpc_url, cwusdc)
        b1_rc, b1_out, _ = run(
            ["cast", "call", cwusdt, "balanceOf(address)(uint256)", signer, "--rpc-url", rpc_url],
            timeout=5,
        )
        b2_rc, b2_out, _ = run(
            ["cast", "call", cwusdc, "balanceOf(address)(uint256)", signer, "--rpc-url", rpc_url],
            timeout=5,
        )
        a1_rc, a1_out, _ = run(
            ["cast", "call", cwusdt, "allowance(address,address)(uint256)", signer, router, "--rpc-url", rpc_url],
            timeout=5,
        )
        a2_rc, a2_out, _ = run(
            ["cast", "call", cwusdc, "allowance(address,address)(uint256)", signer, router, "--rpc-url", rpc_url],
            timeout=5,
        )
        # First whitespace token of cast output is the numeric value.
        cwusdt_raw = b1_out.split()[0] if b1_rc == 0 and b1_out else "0"
        cwusdc_raw = b2_out.split()[0] if b2_rc == 0 and b2_out else "0"
        cwusdt_human = token_value_human(cwusdt_raw, cwusdt_decimals)
        cwusdc_human = token_value_human(cwusdc_raw, cwusdc_decimals)
        # Equal-sided seed capacity = smaller of the two balances.
        max_seed = min_human(cwusdt_human, cwusdc_human)
        gas_human = gas_out if gas_rc == 0 else "0"
        pair_address = pair_out if pair_rc == 0 else ""
        reserve0_raw, reserve1_raw = pair_reserves(rpc_url, pair_address)
        # "Seeded live" = pair exists and at least one reserve is non-zero.
        pair_seeded_live = pair_address not in ("", "0x0000000000000000000000000000000000000000") and (
            int(reserve0_raw, 0) > 0 or int(reserve1_raw, 0) > 0
        )
        has_both_sides = Decimal(cwusdt_human) > 0 and Decimal(cwusdc_human) > 0
        # 0.001 native is the minimum ops threshold used throughout this program.
        has_gas = Decimal(gas_human) > Decimal("0.001")
        # Ready-now additionally requires the pair NOT to exist yet (zero address).
        funding_ready = has_both_sides and has_gas and pair_address == "0x0000000000000000000000000000000000000000"
        entry = {
            "chain_id": chain_id,
            "network": NETWORK_NAMES[chain_id],
            "phase_1_pair": "cWUSDT/cWUSDC",
            "signer": signer,
            "rpc_env_key": rpc_key,
            "factory": factory,
            "router": router,
            "pair_address": pair_address,
            "pair_exists": pair_address not in ("", "0x0000000000000000000000000000000000000000"),
            "pair_seeded_live": pair_seeded_live,
            "native_balance": gas_human,
            "native_balance_ok_for_ops": has_gas,
            "cwusdt": {
                "address": cwusdt,
                "decimals": cwusdt_decimals,
                "balance_raw": cwusdt_raw,
                "balance_human": cwusdt_human,
                "allowance_raw": a1_out.split()[0] if a1_rc == 0 and a1_out else "0",
            },
            "cwusdc": {
                "address": cwusdc,
                "decimals": cwusdc_decimals,
                "balance_raw": cwusdc_raw,
                "balance_human": cwusdc_human,
                "allowance_raw": a2_out.split()[0] if a2_rc == 0 and a2_out else "0",
            },
            "max_equal_seed_human": max_seed,
            "reserve0_raw": reserve0_raw,
            "reserve1_raw": reserve1_raw,
            "execution_status": "completed" if pair_seeded_live else ("ready_now" if funding_ready else "needs_funding"),
            "funding_ready_now": funding_ready,
            "funding_blockers": [],
        }
        # Enumerate concrete blockers for the operator.
        if not has_gas:
            entry["funding_blockers"].append("native gas below 0.001")
        if Decimal(cwusdt_human) <= 0:
            entry["funding_blockers"].append("cWUSDT balance is zero")
        if Decimal(cwusdc_human) <= 0:
            entry["funding_blockers"].append("cWUSDC balance is zero")
        if entry["pair_exists"] and not pair_seeded_live:
            entry["funding_blockers"].append("pair exists but is not yet seeded")
        if pair_seeded_live:
            completed.append(chain_id)
        elif funding_ready:
            ready_now.append(chain_id)
        else:
            needs_funding.append(chain_id)
        entries.append(entry)
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 funding readiness",
        "purpose": "Live deployer-wallet funding view for seeding cWUSDT/cWUSDC phase-1 pools chain by chain.",
        "signer": signer,
        "phase_1_pair": "cWUSDT/cWUSDC",
        "completed_chain_ids": completed,
        "ready_now_chain_ids": ready_now,
        "needs_funding_chain_ids": needs_funding,
        "entries": entries,
        "source_artifacts": [
            "cross-chain-pmm-lps/config/deployment-status.json",
            "smom-dbis-138/.env",
            "smom-dbis-138/scripts/load-env.sh",
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: summary table then per-chain blocker sections.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Funding Readiness",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Signer: `{signer}`",
        "- Purpose: live deployer-wallet funding view for seeding `cWUSDT/cWUSDC` phase-1 pools chain by chain.",
        f"- Completed: {', '.join(f'`{cid}`' for cid in completed) if completed else 'none'}",
        f"- Ready now: {', '.join(f'`{cid}`' for cid in ready_now) if ready_now else 'none'}",
        f"- Needs funding: {', '.join(f'`{cid}`' for cid in needs_funding) if needs_funding else 'none'}",
        "",
        "| Chain | Network | Status | Pair Exists | Seeded Live | Native Gas | cWUSDT | cWUSDC | Max Equal Seed |",
        "|---|---|---|---|---|---:|---:|---:|---:|",
    ]
    for entry in entries:
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | "
            f"`{entry['execution_status']}` | "
            f"`{str(entry['pair_exists']).lower()}` | "
            f"`{str(entry['pair_seeded_live']).lower()}` | "
            f"`{entry['native_balance']}` | "
            f"`{entry['cwusdt']['balance_human']}` | "
            f"`{entry['cwusdc']['balance_human']}` | "
            f"`{entry['max_equal_seed_human']}` |"
        )
    lines.extend(["", "## Blockers", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- execution status: `{entry['execution_status']}`")
        lines.append(f"- pair seeded live: `{str(entry['pair_seeded_live']).lower()}`")
        if entry["funding_blockers"]:
            for blocker in entry["funding_blockers"]:
                lines.append(f"- {blocker}")
        else:
            lines.append("- no funding blockers")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,112 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from pathlib import Path
# Repository root (two levels above this script).
ROOT = Path(__file__).resolve().parents[2]
# Input: bridge-or-mint runbook covering all phase-1 chains.
SOURCE = (
    ROOT
    / "reports"
    / "extraction"
    / "promod-uniswap-v2-phase1-bridge-or-mint-runbook-latest.json"
)
# Output markdown paste pack for the remaining eight chains.
DOC = (
    ROOT
    / "docs"
    / "03-deployment"
    / "PROMOD_UNISWAP_V2_PHASE1_REMAINING_8_OPERATOR_PASTE_PACK.md"
)
# Output JSON report mirroring the doc.
REPORT = (
    ROOT
    / "reports"
    / "extraction"
    / "promod-uniswap-v2-phase1-remaining-8-operator-paste-pack-latest.json"
)
def now() -> str:
    """Return the current UTC time as an ISO-8601 `...Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Decode the JSON document at `path`."""
    contents = path.read_text()
    return json.loads(contents)


def write_json(path: Path, payload) -> None:
    """Emit `payload` as indented JSON with trailing newline; ensures parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(payload, indent=2) + "\n")


def write_text(path: Path, text: str) -> None:
    """Emit `text` with a single trailing newline; ensures parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def main() -> None:
    """Filter the bridge-or-mint runbook to the remaining eight chains
    (everything except 1 and 137) and write a flat paste-ready pack."""
    source = load(SOURCE)
    # Chains 1 and 137 are excluded (already handled elsewhere).
    entries = [e for e in source["entries"] if e["chain_id"] not in (1, 137)]
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 remaining 8 operator paste pack",
        "purpose": "Flat paste-ready operator pack for the remaining eight phase-1 chains after chain 1 and 137.",
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-bridge-or-mint-runbook-latest.json"
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: ordered summary table then per-chain paste blocks.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 1 Remaining 8 Operator Paste Pack",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: one flat paste-ready pack for the remaining eight phase-1 chains after `1` and `137`.",
        "",
        "| Order | Chain | Network | Preferred Path | 3x cUSDT | 3x cUSDC | CW_BRIDGE Env | Selector Env |",
        "|---|---|---|---|---:|---:|---|---|",
    ]
    for idx, entry in enumerate(entries, start=1):
        lines.append(
            f"| `{idx}` | `{entry['chain_id']}` | {entry['network']} | `{entry['preferred_path']}` | "
            f"`{entry['cUSDT_to_bridge_3x']}` | `{entry['cUSDC_to_bridge_3x']}` | "
            f"`{entry['cw_bridge_env']}` | `{entry['selector_env'] or 'missing'}` |"
        )
    lines.extend(["", "## Flat Paste Pack", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Preferred path: `{entry['preferred_path']}`")
        lines.append(f"- 3x cUSDT from 138: `{entry['cUSDT_to_bridge_3x']}`")
        lines.append(f"- 3x cUSDC from 138: `{entry['cUSDC_to_bridge_3x']}`")
        lines.append(f"- CW_BRIDGE env: `{entry['cw_bridge_env']}`")
        lines.append(f"- Selector env: `{entry['selector_env'] or 'missing'}`")
        lines.append("")
        # Optional sections: preflight and mint-fallback blocks may be absent.
        if entry.get("bridge_preflight_block"):
            lines.append("Bridge preflight:")
            lines.append("```bash")
            lines.append(entry["bridge_preflight_block"])
            lines.append("```")
            lines.append("")
        if entry.get("mint_fallback_steps"):
            lines.append("Mint fallback:")
            for step in entry["mint_fallback_steps"]:
                lines.append(f"- `{step['token']}` `{step['amount_human']}`")
                lines.append("```bash")
                lines.append(step["exact_mint_block"])
                lines.append("```")
            lines.append("")
        lines.append("Post-funding deploy:")
        lines.append("```bash")
        lines.append(entry["post_funding_deploy_block"])
        lines.append("```")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
"""Emit the phase-1 "remaining 8" shell-only paste pack.

Reads the operator paste-pack report, concatenates every shell block in
execution order (bridge preflight, mint fallbacks, post-funding deploy),
and writes both the flat .sh paste pack and a JSON summary report.
"""
from __future__ import annotations

import json
import time
from pathlib import Path

ROOT = Path(__file__).resolve().parents[2]
SOURCE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-remaining-8-operator-paste-pack-latest.json"
SCRIPT_OUT = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE1_REMAINING_8_SHELL_PASTE_PACK.sh"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase1-remaining-8-shell-paste-pack-latest.json"


def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Read and parse one JSON document."""
    with path.open() as handle:
        return json.load(handle)


def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")


def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")


def main() -> None:
    """Build the flat shell paste pack and its JSON companion report."""
    pack = load(SOURCE)
    chain_entries = pack["entries"]

    # Collect shell blocks in strict execution order per chain:
    # preflight (if present), each mint fallback, then the deploy block.
    blocks: list[str] = []
    for chain in chain_entries:
        preflight = chain.get("bridge_preflight_block")
        if preflight:
            blocks.append(preflight.strip())
        blocks.extend(
            step["exact_mint_block"].strip()
            for step in chain.get("mint_fallback_steps", [])
        )
        blocks.append(chain["post_funding_deploy_block"].strip())
    write_text(SCRIPT_OUT, "\n\n".join(blocks))

    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 1 remaining 8 shell paste pack",
        "purpose": "Pure shell-only paste pack with the exact remaining eight phase-1 blocks in execution order and no headings.",
        "output_script": "docs/03-deployment/PROMOD_UNISWAP_V2_PHASE1_REMAINING_8_SHELL_PASTE_PACK.sh",
        "chain_order": [chain["chain_id"] for chain in chain_entries],
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase1-remaining-8-operator-paste-pack-latest.json"
        ],
    }
    write_json(REPORT, payload)
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python3
"""Flatten the phase-2 operator sequence into a paste-ready pack.

Expands every per-chain phase-2 pair into one flat, ordered entry list,
then writes a JSON report plus a markdown document holding the probe /
create-if-absent / deploy shell blocks for each pair.
"""
from __future__ import annotations

import json
import time
from pathlib import Path

ROOT = Path(__file__).resolve().parents[2]
SOURCE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-sequence-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-paste-pack-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_OPERATOR_PASTE_PACK.md"


def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Read and parse one JSON document."""
    with path.open() as handle:
        return json.load(handle)


def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")


def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")


def main() -> None:
    """Emit the flat paste-pack JSON report and markdown document."""
    sequence = load(SOURCE)
    # One flat entry per (chain, pair), preserving execution order.
    entries = [
        {
            "chain_id": chain["chain_id"],
            "network": chain["network"],
            "pair": pair["pair"],
            "amount_env_a": pair["amount_env_a"],
            "amount_env_b": pair["amount_env_b"],
            "probe_block": pair["probe_block"],
            "create_if_absent_block": pair["create_if_absent_block"],
            "deploy_block": pair["deploy_block"],
            "post_pair_commands": chain["post_pair_commands"],
        }
        for chain in sequence["entries"]
        for pair in chain["phase_2_pairs"]
    ]
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 2 operator paste pack",
        "purpose": "Flat paste-ready operator pack for every phase-2 cW* wrapped-mesh pair in execution order.",
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase2-operator-sequence-latest.json",
        ],
    }
    write_json(REPORT, payload)

    lines = [
        "# Mr. Promod Uniswap V2 Phase 2 Operator Paste Pack",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: flat paste-ready operator pack for every phase-2 wrapped-mesh pair.",
        "",
        "| Order | Chain | Network | Pair | Amount Envs |",
        "|---|---|---|---|---|",
    ]
    lines.extend(
        f"| `{order}` | `{entry['chain_id']}` | {entry['network']} | `{entry['pair']}` | "
        f"`{entry['amount_env_a']}`, `{entry['amount_env_b']}` |"
        for order, entry in enumerate(entries, start=1)
    )
    lines.extend(["", "## Flat Paste Pack", ""])
    for entry in entries:
        lines.extend(
            [
                f"### Chain `{entry['chain_id']}` — {entry['network']} — `{entry['pair']}`",
                "",
                "Probe:",
                "```bash",
                entry["probe_block"],
                "```",
                "",
                "Create if absent:",
                "```bash",
                entry["create_if_absent_block"],
                "```",
                "",
                "Deploy:",
                "```bash",
                entry["deploy_block"],
                "```",
                "",
            ]
        )
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,246 @@
#!/usr/bin/env python3
"""Generate the exact phase-2 live operator sequence for the cW* wrapped mesh.

Joins the phase-order report with the live deployment status, renders one
probe / create-if-absent / deploy shell-block triple per resolvable pair,
and writes both a JSON report and a markdown operator document.
"""
from __future__ import annotations
import json
import re
import time
from pathlib import Path
# Repository root: this script lives two directory levels below it.
ROOT = Path(__file__).resolve().parents[2]
PHASE_ORDER = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase-order-latest.json"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-sequence-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_OPERATOR_SEQUENCE.md"
# Candidate RPC env var names per chain id, in preference order; the first
# name in the list is baked into the rendered shell blocks.
RPC_ENV_KEYS = {
    1: ["ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
}
def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def load(path: Path):
    """Read and parse one JSON document."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(payload, indent=2) + "\n")
def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def chain_entry(status: dict, chain_id: int) -> dict:
    """Look up one chain in deployment status, tolerating str or int keys."""
    return status["chains"].get(str(chain_id)) or status["chains"].get(chain_id) or {}
def resolve_token(chain_status: dict, symbol: str) -> str:
    """Resolve a cW* symbol to its deployed address, or "" when unknown.

    `cwTokens` values may be plain address strings or dicts carrying an
    `address`/`token` field; both shapes are handled.
    """
    cw = chain_status.get("cwTokens", {})
    value = cw.get(symbol, "")
    if isinstance(value, dict):
        return value.get("address") or value.get("token") or ""
    return value or ""
def sanitize(text: str) -> str:
    """Uppercase and collapse non-alphanumerics to `_` for env var names."""
    return re.sub(r"[^A-Z0-9]+", "_", text.upper()).strip("_")
def amount_envs(chain_id: int, pair: str) -> tuple[str, str]:
    """Per-pair raw-amount env var names (A and B sides) for one chain."""
    token_a, token_b = pair.split("/")
    prefix = f"PHASE2_{chain_id}_{sanitize(token_a)}_{sanitize(token_b)}"
    return f"{prefix}_A_RAW", f"{prefix}_B_RAW"
def pair_commands(chain_id: int, pair: str, token_a_addr: str, token_b_addr: str, rpc_key: str) -> dict:
    """Render the probe / create-if-absent / deploy shell blocks for one pair."""
    factory_var = f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY"
    router_var = f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER"
    amount_a_env, amount_b_env = amount_envs(chain_id, pair)
    token_a, token_b = pair.split("/")
    # Shared shell prelude: load the project env, export addresses and
    # amounts, then abort (non-zero `test -n`) if either amount env is unset.
    prelude = [
        "source smom-dbis-138/scripts/load-env.sh >/dev/null",
        f'export RPC_URL="${{{rpc_key}}}"',
        f'export FACTORY="${{{factory_var}}}"',
        f'export ROUTER="${{{router_var}}}"',
        f'export TOKEN_A="{token_a_addr}"',
        f'export TOKEN_B="{token_b_addr}"',
        f'export AMOUNT_A_RAW="${{{amount_a_env}:-}}"',
        f'export AMOUNT_B_RAW="${{{amount_b_env}:-}}"',
        'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
        'export DEADLINE="$(( $(date +%s) + 3600 ))"',
        'test -n "$AMOUNT_A_RAW" && test -n "$AMOUNT_B_RAW"',
    ]
    # Read-only factory query for the pair address (zero address = absent).
    probe = 'cast call "$FACTORY" \'getPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" --rpc-url "$RPC_URL"'
    # Only calls createPair when the factory reports the zero address.
    create = "\n".join(
        prelude
        + [
            'PAIR="$(cast call "$FACTORY" \'getPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" --rpc-url "$RPC_URL")"',
            'if [[ "$PAIR" == "0x0000000000000000000000000000000000000000" ]]; then',
            ' cast send "$FACTORY" \'createPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "fi",
        ]
    )
    # Approve both tokens for the router, then add liquidity with the min
    # amounts equal to the desired amounts (intended as the first LP seed).
    deploy = "\n".join(
        prelude
        + [
            'cast send "$TOKEN_A" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_A_RAW" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "",
            'cast send "$TOKEN_B" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_B_RAW" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "",
            'cast send "$ROUTER" \\',
            ' \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
            ' "$TOKEN_A" "$TOKEN_B" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$SIGNER" "$DEADLINE" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        ]
    )
    return {
        "pair": pair,
        "token_a": token_a,
        "token_b": token_b,
        "token_a_address": token_a_addr,
        "token_b_address": token_b_addr,
        "amount_env_a": amount_a_env,
        "amount_env_b": amount_b_env,
        "probe_block": probe,
        "create_if_absent_block": create,
        "deploy_block": deploy,
    }
def main() -> None:
    """Build per-chain phase-2 sequences and write the report and document."""
    phase = load(PHASE_ORDER)
    status = load(DEPLOYMENT_STATUS)
    entries = []
    for entry in phase["entries"]:
        chain_id = entry["chain_id"]
        chain_status = chain_entry(status, chain_id)
        rpc_keys = RPC_ENV_KEYS.get(chain_id, [])
        phase_2_pairs = entry.get("phase_2_full_cw_wrapped_mesh", [])
        pair_entries = []
        for pair in phase_2_pairs:
            token_a, token_b = pair.split("/")
            token_a_addr = resolve_token(chain_status, token_a)
            token_b_addr = resolve_token(chain_status, token_b)
            # Pairs whose token addresses are not recorded yet are skipped;
            # they simply do not appear in the rendered sequence.
            if not token_a_addr or not token_b_addr:
                continue
            pair_entries.append(
                pair_commands(chain_id, pair, token_a_addr, token_b_addr, rpc_keys[0] if rpc_keys else "RPC_URL")
            )
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "tier": entry["tier"],
                "phase_1_core_rail": entry["phase_1_core_rail"],
                "phase_2_pair_count": len(pair_entries),
                "phase_2_other_gru_v2_cw_tokens": entry.get("phase_2_other_gru_v2_cw_tokens", []),
                "rpc_env_keys": rpc_keys,
                "required_uniswap_v2_env_vars": [
                    f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY",
                    f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER",
                    f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK",
                ],
                "phase_2_pairs": pair_entries,
                "post_pair_commands": [
                    "bash scripts/verify/build-promod-uniswap-v2-live-pair-discovery.sh",
                    "python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py --write-discovered",
                    "node cross-chain-pmm-lps/scripts/validate-deployment-status.cjs cross-chain-pmm-lps/config/deployment-status.json",
                    "bash scripts/verify/build-promod-uniswap-v2-promotion-gates.sh",
                ],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": phase["program_name"],
        "purpose": "Exact phase-2 live operator sequence for the full cW* wrapped mesh rollout, using per-pair amount envs rather than fixed sizing assumptions.",
        "mainnet_funding_posture": phase["mainnet_funding_posture"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase-order-latest.json",
            "cross-chain-pmm-lps/config/deployment-status.json",
        ],
    }
    write_json(REPORT, payload)
    # Markdown rendering: summary table followed by per-chain sections.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 2 Operator Sequence",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: {payload['program_name']}",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        "- Purpose: exact phase-2 live operator sequence for the full `cW*` wrapped mesh rollout.",
        "- Funding rule: each pair uses explicit raw amount env vars because BTC, gold, FX, and fiat rails do not share a safe default seed size.",
        "",
        "| Chain | Network | Phase 2 Pair Count | RPC Keys | Required Env |",
        "|---|---|---:|---|---|",
    ]
    for entry in entries:
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['phase_2_pair_count']}` | "
            f"{', '.join(f'`{x}`' for x in entry['rpc_env_keys'])} | "
            f"{', '.join(f'`{x}`' for x in entry['required_uniswap_v2_env_vars'])} |"
        )
    lines.extend(["", "## Per-Chain Sequence", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Phase 1 prerequisite: `{entry['phase_1_core_rail']}`")
        lines.append(f"- Phase 2 cW* count: `{entry['phase_2_pair_count']}`")
        lines.append(f"- Other phase-2 cW* tokens: {', '.join(f'`{x}`' for x in entry['phase_2_other_gru_v2_cw_tokens']) or 'none'}")
        lines.append("")
        for pair_entry in entry["phase_2_pairs"]:
            lines.append(f"#### `{pair_entry['pair']}`")
            lines.append("")
            lines.append(
                f"- Amount envs: `{pair_entry['amount_env_a']}`, `{pair_entry['amount_env_b']}`"
            )
            lines.append(
                f"- Token addresses: `{pair_entry['token_a']}={pair_entry['token_a_address']}`, `{pair_entry['token_b']}={pair_entry['token_b_address']}`"
            )
            lines.append("Probe:")
            lines.append("```bash")
            lines.append(pair_entry["probe_block"])
            lines.append("```")
            lines.append("Create if absent:")
            lines.append("```bash")
            lines.append(pair_entry["create_if_absent_block"])
            lines.append("```")
            lines.append("Deploy:")
            lines.append("```bash")
            lines.append(pair_entry["deploy_block"])
            lines.append("```")
            lines.append("")
        lines.append("Post-pair refresh commands:")
        for cmd in entry["post_pair_commands"]:
            lines.append(f"- `{cmd}`")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
"""Emit the phase-2 shell-only paste pack.

Concatenates every pair's create-if-absent and deploy blocks, in sequence,
into one .sh paste pack, and records a flat JSON summary of the pairs.
"""
from __future__ import annotations

import json
import time
from pathlib import Path

ROOT = Path(__file__).resolve().parents[2]
SOURCE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-sequence-latest.json"
SCRIPT = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_SHELL_PASTE_PACK.sh"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-shell-paste-pack-latest.json"


def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Read and parse one JSON document."""
    with path.open() as handle:
        return json.load(handle)


def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")


def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")


def main() -> None:
    """Build the shell paste pack and its JSON companion report."""
    sequence = load(SOURCE)
    shell_blocks: list[str] = []
    flat_entries: list[dict] = []
    for chain in sequence["entries"]:
        for pair in chain["phase_2_pairs"]:
            flat_entries.append(
                {
                    "chain_id": chain["chain_id"],
                    "network": chain["network"],
                    "pair": pair["pair"],
                    "amount_env_a": pair["amount_env_a"],
                    "amount_env_b": pair["amount_env_b"],
                }
            )
            # Create-if-absent first, then deploy, each followed by a spacer.
            shell_blocks.extend(
                [
                    pair["create_if_absent_block"].strip(),
                    "",
                    pair["deploy_block"].strip(),
                    "",
                ]
            )
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 2 shell paste pack",
        "purpose": "Pure shell-only paste pack for every phase-2 wrapped-mesh pair in sequence.",
        "entries": flat_entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase2-operator-sequence-latest.json",
        ],
    }
    write_json(REPORT, payload)
    write_text(SCRIPT, "\n".join(shell_blocks))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env python3
"""Verify the phase-2 wave-1 pool rollout and emit a completion-status report.

For every chain in the phase-2 operator sequence, resolves the six wave-1
cW* FX/stable pair addresses (via overrides or the factory's `getPair`),
reads on-chain reserves with `cast call`, classifies each pair and chain,
and writes a JSON report plus a markdown summary.
"""
from __future__ import annotations
import json
import os
import subprocess
from datetime import datetime, timezone
from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parents[2]
PHASE2_SEQUENCE = REPO_ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-sequence-latest.json"
OUT_JSON = REPO_ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave1-completion-status-latest.json"
OUT_MD = REPO_ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_WAVE1_COMPLETION_STATUS.md"
# The six wave-1 FX/stable pairs tracked by this report.
WAVE1_LABELS = {
    "cWAUDC/cWUSDC",
    "cWAUDC/cWUSDT",
    "cWEURC/cWUSDC",
    "cWEURC/cWUSDT",
    "cWGBPC/cWUSDC",
    "cWGBPC/cWUSDT",
}
# Per-chain (token_a, token_b) address overrides that supersede the
# addresses recorded in the phase-2 sequence artifact.
TOKEN_OVERRIDES = {
    137: {
        "cWAUDC/cWUSDC": ("0xFb4B6Cc81211F7d886950158294A44C312abCA29", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
        "cWAUDC/cWUSDT": ("0xFb4B6Cc81211F7d886950158294A44C312abCA29", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
        "cWEURC/cWUSDC": ("0x3CD9ee18db7ad13616FCC1c83bC6098e03968E66", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
        "cWEURC/cWUSDT": ("0x3CD9ee18db7ad13616FCC1c83bC6098e03968E66", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
        "cWGBPC/cWUSDC": ("0x948690147D2e50ffe50C5d38C14125aD6a9FA036", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
        "cWGBPC/cWUSDT": ("0x948690147D2e50ffe50C5d38C14125aD6a9FA036", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
    }
}
# Known pair addresses; when present, the factory getPair probe is skipped.
PAIR_ADDRESS_OVERRIDES = {
    137: {
        "cWAUDC/cWUSDC": "0x6ffa939d75bd6affe019705f2c9240f97975ffa0",
        "cWAUDC/cWUSDT": "0x526a3a38b77d199e8fd07f37597f9ca0fa5a87cd",
        "cWEURC/cWUSDC": "0xd5907a692f7e8f650fc5feb8ebb3196fea2069a3",
        "cWEURC/cWUSDT": "0x3292c0ed9eec0443635367717047876fe3cdb514",
        "cWGBPC/cWUSDC": "0x52786e752be5fb1b18e86959f87b7a59e2c6de6d",
        "cWGBPC/cWUSDT": "0x1b6e8484db0cd9c00d39e457c2d126c8983f5390",
    }
}
# Candidate RPC env var names per chain id, in preference order.
RPC_KEYS = {
    1: ["ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
    56: ["BSC_MAINNET_RPC", "BSC_RPC_URL"],
    100: ["GNOSIS_MAINNET_RPC", "GNOSIS_RPC_URL"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    8453: ["BASE_MAINNET_RPC", "BASE_RPC_URL"],
    42161: ["ARBITRUM_MAINNET_RPC", "ARBITRUM_RPC_URL"],
    42220: ["CELO_MAINNET_RPC", "CELO_RPC_URL"],
    43114: ["AVALANCHE_MAINNET_RPC", "AVALANCHE_RPC_URL"],
}
# Static explorer-publication posture embedded verbatim in the report.
VERIFICATION_STATUS = {
    "status": "blocked",
    "summary": "Deployment and liquidity rollout is complete, but explorer publication is not fully complete.",
    "blockers": [
        "Current local CompliantWrappedToken artifact does not exactly match deployed runtime bytecode.",
        "forge verify-contract cannot use the historical deploy profile directly in this environment.",
        "Some explorer backends require paid API access or manual submission paths.",
    ],
}
def load_env() -> dict[str, str]:
    """Source the project env in a login bash subshell and return it as a dict.

    Uses a unique temp file for the snapshot (a fixed /tmp name is
    predictable and racy) and removes it afterwards.
    """
    import tempfile  # local import: only this helper needs it
    with tempfile.NamedTemporaryFile(
        mode="w", prefix="promod_phase2_env_", suffix=".txt", delete=False
    ) as tmp:
        env_dump = Path(tmp.name)
    try:
        # Quote both paths so a checkout path containing spaces still works.
        subprocess.run(
            [
                "bash",
                "-lc",
                f'cd "{REPO_ROOT / "smom-dbis-138"}" && source scripts/load-env.sh >/dev/null && env | sort > "{env_dump}"',
            ],
            check=True,
        )
        env: dict[str, str] = {}
        for line in env_dump.read_text().splitlines():
            if "=" in line:
                k, v = line.split("=", 1)
                env[k] = v
        return env
    finally:
        env_dump.unlink(missing_ok=True)
def cast_call(rpc_url: str, to: str, signature: str, *args: str) -> str:
    """Run `cast call` against `to` and return its stripped stdout.

    Raises CalledProcessError on RPC failure and TimeoutExpired after 30s.
    """
    return subprocess.check_output(
        ["cast", "call", to, signature, *args, "--rpc-url", rpc_url],
        text=True,
        timeout=30,
    ).strip()
def write_json(data: dict) -> None:
    """Write the JSON report, creating parent directories as needed."""
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(data, indent=2) + "\n")
def write_markdown(data: dict) -> None:
    """Render the markdown summary (reserve table + explorer posture)."""
    lines: list[str] = []
    lines.append("# Promod Uniswap V2 Phase 2 Wave 1 Completion Status")
    lines.append("")
    lines.append(f"**Generated:** {data['generated_at']}")
    lines.append("")
    lines.append(f"**Overall Status:** `{data['overall_status']}`")
    lines.append("")
    lines.append(f"**Completed Chains:** `{', '.join(str(x) for x in data['completed_chain_ids'])}`")
    lines.append("")
    lines.append("## Reserve Verification")
    lines.append("")
    lines.append("| Chain | Network | Pair | Pair Address | Reserves | Status |")
    lines.append("|---|---|---|---|---|---|")
    for chain in data["chains"]:
        for pair in chain["wave1_pairs"]:
            lines.append(
                f"| `{chain['chain_id']}` | {chain['network']} | `{pair['pair']}` | `{pair['pair_address']}` | `{pair['reserves']}` | `{pair['status']}` |"
            )
    lines.append("")
    lines.append("## Explorer Publication")
    lines.append("")
    lines.append(f"**Status:** `{data['verification_status']['status']}`")
    lines.append("")
    lines.append(data["verification_status"]["summary"])
    lines.append("")
    for blocker in data["verification_status"]["blockers"]:
        lines.append(f"- {blocker}")
    lines.append("")
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text("\n".join(lines) + "\n")
def main() -> None:
    """Probe every wave-1 pair on every chain and write both reports."""
    env = load_env()
    sequence = json.loads(PHASE2_SEQUENCE.read_text())
    chains_out = []
    completed_chain_ids = []
    for entry in sequence["entries"]:
        chain_id = entry["chain_id"]
        # .get() so an unknown chain raises the explicit RuntimeError below
        # rather than a bare KeyError.
        rpc_keys = RPC_KEYS.get(chain_id, [])
        rpc_url = next((env.get(k) for k in rpc_keys if env.get(k)), None)
        if not rpc_url:
            raise RuntimeError(f"Missing RPC URL for chain {chain_id}")
        factory = env[f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY"]
        wave1_pairs = []
        for pair in entry["phase_2_pairs"]:
            if pair["pair"] not in WAVE1_LABELS:
                continue
            token_a = pair["token_a_address"]
            token_b = pair["token_b_address"]
            if chain_id in TOKEN_OVERRIDES and pair["pair"] in TOKEN_OVERRIDES[chain_id]:
                token_a, token_b = TOKEN_OVERRIDES[chain_id][pair["pair"]]
            pair_address = PAIR_ADDRESS_OVERRIDES.get(chain_id, {}).get(pair["pair"])
            if not pair_address:
                pair_address = cast_call(
                    rpc_url,
                    factory,
                    "getPair(address,address)(address)",
                    token_a,
                    token_b,
                )
            reserves = "PAIR_NOT_FOUND"
            status = "missing"
            if pair_address.lower() != "0x0000000000000000000000000000000000000000":
                reserves = cast_call(
                    rpc_url,
                    pair_address,
                    "getReserves()((uint112,uint112,uint32))",
                )
                # Expected seed: exactly 1e9 raw on both sides of the pool.
                status = "complete" if "(1000000000 [1e9], 1000000000 [1e9]" in reserves else "unexpected_reserves"
            wave1_pairs.append(
                {
                    "pair": pair["pair"],
                    "pair_address": pair_address,
                    "reserves": reserves,
                    "status": status,
                }
            )
        # A chain with zero wave-1 pairs must not be vacuously "complete"
        # (all() over an empty list is True).
        chain_status = (
            "complete"
            if wave1_pairs and all(p["status"] == "complete" for p in wave1_pairs)
            else "incomplete"
        )
        if chain_status == "complete":
            completed_chain_ids.append(chain_id)
        chains_out.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "status": chain_status,
                "wave1_pairs": wave1_pairs,
            }
        )
    data = {
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "program_name": "promod-uniswap-v2-phase2-wave1-completion-status",
        # Guard the empty-chain-list case for the same vacuous-truth reason.
        "overall_status": (
            "complete"
            if chains_out and len(completed_chain_ids) == len(chains_out)
            else "incomplete"
        ),
        "completed_chain_ids": completed_chain_ids,
        "verification_status": VERIFICATION_STATUS,
        "chains": chains_out,
        "source_artifacts": {
            "phase2_operator_sequence": str(PHASE2_SEQUENCE.relative_to(REPO_ROOT)),
        },
    }
    write_json(data)
    write_markdown(data)
    print(OUT_JSON)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python3
"""Flatten the phase-2 wave-2 operator sequence into a paste-ready pack.

Expands every per-chain wave-2 pair into one flat, ordered entry list,
then writes a JSON report plus a markdown document holding the probe /
create-if-absent / deploy shell blocks for each pair.
"""
from __future__ import annotations

import json
import time
from pathlib import Path

ROOT = Path(__file__).resolve().parents[2]
SOURCE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-operator-paste-pack-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_WAVE2_OPERATOR_PASTE_PACK.md"


def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def load(path: Path):
    """Read and parse one JSON document."""
    with path.open() as handle:
        return json.load(handle)


def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")


def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")


def main() -> None:
    """Emit the flat wave-2 paste-pack JSON report and markdown document."""
    sequence = load(SOURCE)
    # One flat entry per (chain, pair), preserving execution order.
    entries = [
        {
            "chain_id": chain["chain_id"],
            "network": chain["network"],
            "pair": pair["pair"],
            "amount_env_a": pair["amount_env_a"],
            "amount_env_b": pair["amount_env_b"],
            "probe_block": pair["probe_block"],
            "create_if_absent_block": pair["create_if_absent_block"],
            "deploy_block": pair["deploy_block"],
            "post_pair_commands": chain["post_pair_commands"],
        }
        for chain in sequence["entries"]
        for pair in chain["wave2_pairs"]
    ]
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 2 wave 2 operator paste pack",
        "purpose": "Flat paste-ready operator pack for every phase-2 wave-2 cW* wrapped-mesh pair in execution order.",
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json",
        ],
    }
    write_json(REPORT, payload)

    lines = [
        "# Mr. Promod Uniswap V2 Phase 2 Wave 2 Operator Paste Pack",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: flat paste-ready operator pack for every phase-2 wave-2 wrapped-mesh pair.",
        "",
        "| Order | Chain | Network | Pair | Amount Envs |",
        "|---|---|---|---|---|",
    ]
    lines.extend(
        f"| `{order}` | `{entry['chain_id']}` | {entry['network']} | `{entry['pair']}` | "
        f"`{entry['amount_env_a']}`, `{entry['amount_env_b']}` |"
        for order, entry in enumerate(entries, start=1)
    )
    lines.extend(["", "## Flat Paste Pack", ""])
    for entry in entries:
        lines.extend(
            [
                f"### Chain `{entry['chain_id']}` — {entry['network']} — `{entry['pair']}`",
                "",
                "Probe:",
                "```bash",
                entry["probe_block"],
                "```",
                "",
                "Create if absent:",
                "```bash",
                entry["create_if_absent_block"],
                "```",
                "",
                "Deploy:",
                "```bash",
                entry["deploy_block"],
                "```",
                "",
            ]
        )
    write_text(DOC, "\n".join(lines))
    print(REPORT)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,249 @@
#!/usr/bin/env python3
"""Generate the phase-2 wave-2 operator sequence from the live gap report.

Joins the GRU v2 full-mesh gap report with the deployment status, renders
one probe / create-if-absent / deploy shell-block triple per resolvable
recommended pair, and writes a JSON report plus a markdown document.
"""
from __future__ import annotations
import json
import re
import time
from pathlib import Path
# Repository root: this script lives two directory levels below it.
ROOT = Path(__file__).resolve().parents[2]
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
GAP_REPORT = ROOT / "reports" / "extraction" / "promod-gru-v2-full-mesh-gap-report-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_WAVE2_OPERATOR_SEQUENCE.md"
# Candidate RPC env var names per chain id, in preference order; the first
# name in the list is baked into the rendered shell blocks.
RPC_ENV_KEYS = {
    1: ["ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
}
def now() -> str:
    """Current UTC time as an ISO-8601 `Z` string."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def load(path: Path):
    """Read and parse one JSON document."""
    return json.loads(path.read_text())
def write_json(path: Path, payload) -> None:
    """Write pretty-printed JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(payload, indent=2) + "\n")
def write_text(path: Path, text: str) -> None:
    """Write text with exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def chain_entry(status: dict, chain_id: int) -> dict:
    """Look up one chain in deployment status, tolerating str or int keys."""
    return status["chains"].get(str(chain_id)) or status["chains"].get(chain_id) or {}
def resolve_token(chain_status: dict, symbol: str) -> str:
    """Resolve a cW* symbol to its deployed address, or "" when unknown.

    `cwTokens` values may be plain address strings or dicts carrying an
    `address`/`token` field; both shapes are handled.
    """
    cw = chain_status.get("cwTokens", {})
    value = cw.get(symbol, "")
    if isinstance(value, dict):
        return value.get("address") or value.get("token") or ""
    return value or ""
def sanitize(text: str) -> str:
    """Uppercase and collapse non-alphanumerics to `_` for env var names."""
    return re.sub(r"[^A-Z0-9]+", "_", text.upper()).strip("_")
def amount_envs(chain_id: int, pair: str) -> tuple[str, str]:
    """Per-pair raw-amount env var names (A and B sides), wave-2 namespaced."""
    token_a, token_b = pair.split("/")
    prefix = f"PHASE2_WAVE2_{chain_id}_{sanitize(token_a)}_{sanitize(token_b)}"
    return f"{prefix}_A_RAW", f"{prefix}_B_RAW"
def pair_commands(chain_id: int, pair: str, token_a_addr: str, token_b_addr: str, rpc_key: str) -> dict:
    """Render the probe / create-if-absent / deploy shell blocks for one pair."""
    factory_var = f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY"
    router_var = f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER"
    amount_a_env, amount_b_env = amount_envs(chain_id, pair)
    token_a, token_b = pair.split("/")
    # Shared shell prelude: load the project env, export addresses and
    # amounts, then abort (non-zero `test -n`) if either amount env is unset.
    prelude = [
        "source smom-dbis-138/scripts/load-env.sh >/dev/null",
        f'export RPC_URL="${{{rpc_key}}}"',
        f'export FACTORY="${{{factory_var}}}"',
        f'export ROUTER="${{{router_var}}}"',
        f'export TOKEN_A="{token_a_addr}"',
        f'export TOKEN_B="{token_b_addr}"',
        f'export AMOUNT_A_RAW="${{{amount_a_env}:-}}"',
        f'export AMOUNT_B_RAW="${{{amount_b_env}:-}}"',
        'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
        'export DEADLINE="$(( $(date +%s) + 3600 ))"',
        'test -n "$AMOUNT_A_RAW" && test -n "$AMOUNT_B_RAW"',
    ]
    # Read-only factory query for the pair address (zero address = absent).
    probe = 'cast call "$FACTORY" \'getPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" --rpc-url "$RPC_URL"'
    # Only calls createPair when the factory reports the zero address.
    create = "\n".join(
        prelude
        + [
            'PAIR="$(cast call "$FACTORY" \'getPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" --rpc-url "$RPC_URL")"',
            'if [[ "$PAIR" == "0x0000000000000000000000000000000000000000" ]]; then',
            ' cast send "$FACTORY" \'createPair(address,address)(address)\' "$TOKEN_A" "$TOKEN_B" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "fi",
        ]
    )
    # Approve both tokens for the router, then add liquidity with the min
    # amounts equal to the desired amounts (intended as the first LP seed).
    deploy = "\n".join(
        prelude
        + [
            'cast send "$TOKEN_A" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_A_RAW" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "",
            'cast send "$TOKEN_B" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_B_RAW" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            "",
            'cast send "$ROUTER" \\',
            ' \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
            ' "$TOKEN_A" "$TOKEN_B" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$SIGNER" "$DEADLINE" \\',
            ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
        ]
    )
    return {
        "pair": pair,
        "token_a": token_a,
        "token_b": token_b,
        "token_a_address": token_a_addr,
        "token_b_address": token_b_addr,
        "amount_env_a": amount_a_env,
        "amount_env_b": amount_b_env,
        "probe_block": probe,
        "create_if_absent_block": create,
        "deploy_block": deploy,
    }
def main() -> None:
    """Build per-chain wave-2 sequences and write the report and document."""
    status = load(DEPLOYMENT_STATUS)
    gap = load(GAP_REPORT)
    entries = []
    total_pairs = 0
    for row in gap["chains"]:
        chain_id = row["chain_id"]
        chain_status = chain_entry(status, chain_id)
        rpc_keys = RPC_ENV_KEYS.get(chain_id, [])
        remaining_assets = row.get("remaining_wrapped_mesh_assets_after_wave1", [])
        recommended_pairs = row.get("recommended_next_wrapped_mesh_pairs", [])
        pair_entries = []
        for pair in recommended_pairs:
            token_a, token_b = pair.split("/")
            token_a_addr = resolve_token(chain_status, token_a)
            token_b_addr = resolve_token(chain_status, token_b)
            # Pairs whose token addresses are not recorded yet are skipped;
            # they simply do not appear in the rendered sequence.
            if not token_a_addr or not token_b_addr:
                continue
            pair_entries.append(
                pair_commands(chain_id, pair, token_a_addr, token_b_addr, rpc_keys[0] if rpc_keys else "RPC_URL")
            )
        total_pairs += len(pair_entries)
        entries.append(
            {
                "chain_id": chain_id,
                "network": row["network"],
                "bridge_available": row["bridge_available"],
                "remaining_wrapped_mesh_assets_after_wave1": remaining_assets,
                "wave2_pair_count": len(pair_entries),
                "rpc_env_keys": rpc_keys,
                "required_uniswap_v2_env_vars": [
                    f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY",
                    f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER",
                    f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK",
                ],
                "wave2_pairs": pair_entries,
                "post_pair_commands": [
                    "bash scripts/verify/build-promod-uniswap-v2-live-pair-discovery.sh",
                    "python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py --write-discovered",
                    "node cross-chain-pmm-lps/scripts/validate-deployment-status.cjs cross-chain-pmm-lps/config/deployment-status.json",
                    "bash scripts/verify/build-promod-uniswap-v2-promotion-gates.sh",
                ],
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": "promod-uniswap-v2-phase2-wave2-operator-sequence",
        "purpose": "Exact phase-2 wave-2 operator sequence for the remaining GRU v2 cW* wrapped mesh assets after wave 1, using per-pair amount envs and current live gap state.",
        "total_wave2_pair_count": total_pairs,
        "source_artifacts": [
            "reports/extraction/promod-gru-v2-full-mesh-gap-report-latest.json",
            "cross-chain-pmm-lps/config/deployment-status.json",
        ],
        "entries": entries,
    }
    write_json(REPORT, payload)
    # Markdown rendering: summary table followed by per-chain sections.
    lines = [
        "# Mr. Promod Uniswap V2 Phase 2 Wave 2 Operator Sequence",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: `{payload['program_name']}`",
        "- Purpose: exact phase-2 wave-2 operator sequence for the remaining `cW*` wrapped-mesh rollout after wave 1.",
        "- Funding rule: each pair uses explicit raw amount env vars because BTC, gold, FX, and fiat rails do not share a safe default seed size.",
        f"- Total wave-2 pairs: `{payload['total_wave2_pair_count']}`",
        "",
        "| Chain | Network | Remaining Assets | Wave 2 Pair Count | RPC Keys | Required Env |",
        "|---|---|---|---:|---|---|",
    ]
    for entry in entries:
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | "
            f"{', '.join(f'`{x}`' for x in entry['remaining_wrapped_mesh_assets_after_wave1']) or '`none`'} | "
            f"`{entry['wave2_pair_count']}` | "
            f"{', '.join(f'`{x}`' for x in entry['rpc_env_keys'])} | "
            f"{', '.join(f'`{x}`' for x in entry['required_uniswap_v2_env_vars'])} |"
        )
    lines.extend(["", "## Per-Chain Sequence", ""])
    for entry in entries:
        lines.append(f"### Chain `{entry['chain_id']}` — {entry['network']}")
        lines.append("")
        lines.append(f"- Bridge available: `{entry['bridge_available']}`")
        lines.append(
            f"- Remaining wave-2 assets: {', '.join(f'`{x}`' for x in entry['remaining_wrapped_mesh_assets_after_wave1']) or '`none`'}"
        )
        lines.append(f"- Wave-2 pair count: `{entry['wave2_pair_count']}`")
        lines.append("")
        for pair_entry in entry["wave2_pairs"]:
            lines.append(f"#### `{pair_entry['pair']}`")
            lines.append("")
            lines.append(f"- Amount envs: `{pair_entry['amount_env_a']}`, `{pair_entry['amount_env_b']}`")
            lines.append(
                f"- Token addresses: `{pair_entry['token_a']}={pair_entry['token_a_address']}`, `{pair_entry['token_b']}={pair_entry['token_b_address']}`"
            )
            lines.append("Probe:")
            lines.append("```bash")
            lines.append(pair_entry["probe_block"])
            lines.append("```")
            lines.append("Create if absent:")
            lines.append("```bash")
            lines.append(pair_entry["create_if_absent_block"])
            lines.append("```")
            lines.append("Deploy:")
            lines.append("```bash")
            lines.append(pair_entry["deploy_block"])
            lines.append("```")
            lines.append("")
        lines.append("Post-pair refresh commands:")
        for cmd in entry["post_pair_commands"]:
            lines.append(f"- `{cmd}`")
        lines.append("")
    write_text(DOC, "\n".join(lines))
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import time
from pathlib import Path
ROOT = Path(__file__).resolve().parents[2]
SOURCE = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json"
SCRIPT = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_WAVE2_SHELL_PASTE_PACK.sh"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave2-shell-paste-pack-latest.json"
def now() -> str:
    """Return the current UTC time as an ISO-8601 ``Z``-suffixed timestamp."""
    utc_now = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc_now)
def load(path: Path):
    """Deserialize and return the JSON document stored at ``path``."""
    raw = path.read_text()
    return json.loads(raw)
def write_json(path: Path, payload) -> None:
    """Serialize ``payload`` as indented JSON to ``path``, creating parent dirs."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    body = json.dumps(payload, indent=2)
    path.write_text(body + "\n")
def write_text(path: Path, text: str) -> None:
    """Write ``text`` to ``path`` with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    trimmed = text.rstrip()
    path.write_text(trimmed + "\n")
def main() -> None:
    """Flatten the wave-2 operator sequence into a shell-only paste pack.

    Reads the per-chain operator-sequence JSON (SOURCE), writes a JSON report
    (REPORT) listing every (chain, pair) in order, and writes a paste-able
    shell script (SCRIPT) concatenating each pair's create-if-absent and
    deploy blocks.
    """
    source = load(SOURCE)
    blocks = []
    flat_entries = []
    # Preserve source ordering: chains outer, wave-2 pairs inner.
    for chain in source["entries"]:
        for pair in chain["wave2_pairs"]:
            flat_entries.append(
                {
                    "chain_id": chain["chain_id"],
                    "network": chain["network"],
                    "pair": pair["pair"],
                    "amount_env_a": pair["amount_env_a"],
                    "amount_env_b": pair["amount_env_b"],
                }
            )
            # Blank separators keep the pasted script readable block-by-block.
            blocks.append(pair["create_if_absent_block"].strip())
            blocks.append("")
            blocks.append(pair["deploy_block"].strip())
            blocks.append("")
    payload = {
        "generated_at": now(),
        "program_name": "Mr. Promod Uniswap V2 phase 2 wave 2 shell paste pack",
        "purpose": "Pure shell-only paste pack for every phase-2 wave-2 wrapped-mesh pair in sequence.",
        "entries": flat_entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-phase2-wave2-operator-sequence-latest.json",
        ],
    }
    write_json(REPORT, payload)
    write_text(SCRIPT, "\n".join(blocks))
    # Emit the report path so callers can chain on this script's output.
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,110 @@
#!/usr/bin/env python3
from __future__ import annotations
from pathlib import Path
import json
import time
ROOT = Path(__file__).resolve().parents[2]
LIQUIDITY_PROGRAM = ROOT / "reports" / "extraction" / "promod-uniswap-v2-liquidity-program-latest.json"
FIRST_MATRIX = ROOT / "reports" / "extraction" / "promod-uniswap-v2-first-deployment-target-matrix-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase-order-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE_ORDER.md"
def now() -> str:
    """Return the current UTC time formatted as ``YYYY-MM-DDTHH:MM:SSZ``."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def load(path: Path):
    """Deserialize and return the JSON document stored at ``path``."""
    return json.loads(path.read_text())
def write_json(path: Path, payload):
    """Write ``payload`` as indented JSON with a trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(payload, indent=2) + "\n")
def write_text(path: Path, text: str):
    """Write ``text`` with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def main():
    """Join the liquidity program with the first-deployment matrix into a strict
    phase-order artifact: a JSON report (REPORT) and a markdown doc (DOC)."""
    liquidity = load(LIQUIDITY_PROGRAM)
    first = load(FIRST_MATRIX)
    first_by_chain = {entry["chain_id"]: entry for entry in first["entries"]}
    entries = []
    for entry in liquidity["entries"]:
        chain_id = entry["chain_id"]
        # Chains absent from the first matrix fall back to empty phase data.
        first_entry = first_by_chain.get(chain_id, {})
        phase_1 = first_entry.get("first_pair")
        phase_2 = first_entry.get("next_wrapped_expansion_pairs", [])
        phase_3 = entry.get("settlement_phase_pairs", [])
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "tier": entry["tier"],
                "hub_stable": entry["hub_stable"],
                "phase_1_core_rail": phase_1,
                "phase_1_required_tokens": first_entry.get("required_tokens", []),
                "phase_2_full_cw_wrapped_mesh": phase_2,
                "phase_2_other_gru_v2_cw_tokens": first_entry.get("other_gru_v2_cw_tokens", []),
                "phase_3_settlement_rails": phase_3,
                "remaining_live_blockers": first_entry.get("remaining_live_blockers", []),
                "post_phase_1_commands": first_entry.get("post_deploy_commands", []),
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": liquidity["program_name"],
        "purpose": "Strict phase-order artifact for Mr. Promod's Uniswap V2 rollout: phase 1 core rail -> phase 2 full cW* wrapped mesh -> phase 3 settlement rails.",
        "mainnet_funding_posture": liquidity["mainnet_funding_posture"],
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-liquidity-program-latest.json",
            "reports/extraction/promod-uniswap-v2-first-deployment-target-matrix-latest.json",
        ],
    }
    write_json(REPORT, payload)
    lines = [
        "# Mr. Promod Uniswap V2 Phase Order",
        "",
        f"- Generated: `{payload['generated_at']}`",
        f"- Program: {payload['program_name']}",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        "- Purpose: strict rollout order for each chain: phase 1 core rail -> phase 2 full cW* wrapped mesh -> phase 3 settlement rails.",
        "",
        "| Chain | Network | Phase 1 Core Rail | Phase 2 Full cW* Wrapped Mesh | Phase 3 Settlement Rails |",
        "|---|---|---|---|---|",
    ]
    for entry in entries:
        # The table cell is capped at the first 10 wrapped-mesh pairs.
        phase_2 = ", ".join(f"`{pair}`" for pair in entry["phase_2_full_cw_wrapped_mesh"][:10]) or ""
        phase_3 = ", ".join(f"`{pair}`" for pair in entry["phase_3_settlement_rails"]) or ""
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['phase_1_core_rail']}` | {phase_2} | {phase_3} |"
        )
    lines.extend(
        [
            "",
            "## Phase Rules",
            "",
            "- Phase 1 opens the network with the standardized core rail `cWUSDT/cWUSDC` when available.",
            "- Phase 2 expands the rest of the documented GRU v2 `cW*` assets into wrapped pairs against `cWUSDC` and `cWUSDT`.",
            "- Phase 3 adds canonical settlement rails only after the wrapped mesh exists and the chain is ready to expose deeper stable exits.",
        ]
    )
    write_text(DOC, "\n".join(lines))
    # Emit the report path so callers can chain on this script's output.
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,251 @@
#!/usr/bin/env python3
from pathlib import Path
import json
import time
ROOT = Path(__file__).resolve().parents[2]
PROMOD_REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-liquidity-program-latest.json"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
ENV_PATH = ROOT / "smom-dbis-138" / ".env"
LIVE_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
REPORT = ROOT / "reports" / "extraction" / "promod-uniswap-v2-promotion-gates-latest.json"
DOC = ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PROMOTION_GATES.md"
UNIV2_CODE_SUPPORTED = {1, 10, 25, 56, 100, 137, 138, 1111, 8453, 42161, 42220, 43114, 651940}
def load(path: Path):
    """Deserialize and return the JSON document stored at ``path``."""
    return json.loads(path.read_text())
def load_env_file(path: Path):
    """Parse a dotenv-style file into a ``{key: value}`` dict.

    Blank lines, ``#`` comments, and lines without ``=`` are skipped.
    Generalizes the original parser by also accepting ``export KEY=value``
    lines and stripping one layer of matching single or double quotes around
    values — both common in real .env files.

    Args:
        path: Path to the env file.

    Returns:
        Parsed key/value pairs; an empty dict when the file does not exist.
    """
    values = {}
    if not path.exists():
        return values
    for raw_line in path.read_text().splitlines():
        line = raw_line.strip()
        if not line or line.startswith("#") or "=" not in line:
            continue
        # Accept shell-style "export KEY=value" declarations.
        if line.startswith("export "):
            line = line[len("export "):].lstrip()
        key, value = line.split("=", 1)
        value = value.strip()
        # Strip one layer of matching surrounding quotes, if present.
        if len(value) >= 2 and value[0] == value[-1] and value[0] in {'"', "'"}:
            value = value[1:-1]
        values[key.strip()] = value
    return values
def write_json(path: Path, payload):
    """Write ``payload`` as indented JSON with a trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(payload, indent=2) + "\n")
def write_text(path: Path, text: str):
    """Write ``text`` with exactly one trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text.rstrip() + "\n")
def now():
    """Return the current UTC time formatted as ``YYYY-MM-DDTHH:MM:SSZ``."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def build_env_vars(chain_id: int):
    """Describe the three ``CHAIN_<id>_UNISWAP_V2_*`` env vars required per chain.

    Returns a list of ``{"name", "purpose"}`` dicts in FACTORY, ROUTER,
    START_BLOCK order.
    """
    prefix = f"CHAIN_{chain_id}_UNISWAP_V2"
    purposes = {
        "FACTORY": "Factory address used by token-aggregation to discover PairCreated events and pair addresses.",
        "ROUTER": "Router address used for quote execution assumptions and operator documentation.",
        "START_BLOCK": "Start block for indexer backfill so the first live pool is visible without scanning the full chain.",
    }
    return [
        {"name": f"{prefix}_{suffix}", "purpose": text}
        for suffix, text in purposes.items()
    ]
def build_required_registry_records(chain_id: int, hub_stable: str, documented_cw_tokens: list[str], all_pairs: list[str]):
    """Build the registry records an operator must fill before pool promotion.

    Args:
        chain_id: Numeric chain id used in registry paths.
        hub_stable: Hub stable symbol anchoring settlement-phase pairs.
        documented_cw_tokens: Kept for interface compatibility; not consulted —
            the required cW tokens are derived from ``all_pairs`` instead.
        all_pairs: ``"BASE/QUOTE"`` pair strings for this chain.

    Returns:
        List of record descriptors (``path`` + ``purpose``; the pool-entry
        record also carries ``required_fields``).
    """
    # Every distinct cW* base token appearing in a pair needs an address record.
    required_cw_tokens = sorted({pair.split("/")[0] for pair in all_pairs if pair.split("/")[0].startswith("cW")})
    records = []
    for symbol in required_cw_tokens:
        records.append(
            {
                "path": f"chains[{chain_id}].cwTokens.{symbol}",
                "purpose": f"Document the deployed {symbol} address on this chain.",
            }
        )
    records.append(
        {
            "path": f"chains[{chain_id}].anchorAddresses.{hub_stable}",
            # Plain literal: the original carried a stray f-prefix with no placeholders.
            "purpose": "Document the hub stable address used by settlement-phase pairs on this chain.",
        }
    )
    records.append(
        {
            "path": f"chains[{chain_id}].pmmPools[]",
            "purpose": "Record each promoted Uniswap V2 pair as a source-of-truth pool entry using base, quote, and poolAddress until a dedicated uniswapV2Pools registry is introduced.",
            "required_fields": ["base", "quote", "poolAddress"],
        }
    )
    records.append(
        {
            "path": f"ai-mcp-pmm-controller/config/allowlist-{chain_id}.json",
            "purpose": "Expose the promoted pool to MCP/API visibility after it is written to deployment-status.json.",
        }
    )
    return records
def main():
    """Compute per-chain Uniswap V2 promotion gates and write REPORT + DOC.

    A chain is promotion-ready only when all four gates hold: indexer code
    support, complete env wiring, a live pair discoverable on-chain, and the
    pool recorded in deployment-status.json.
    """
    promod = load(PROMOD_REPORT)
    deployment = load(DEPLOYMENT_STATUS)
    env_values = load_env_file(ENV_PATH)
    # Discovery artifact is optional; absence means nothing is discoverable.
    live_discovery = load(LIVE_DISCOVERY) if LIVE_DISCOVERY.exists() else {"entries": []}
    discovery_by_chain = {str(entry["chain_id"]): entry for entry in live_discovery.get("entries", [])}
    entries = []
    blocked_chain_count = 0
    for entry in promod["entries"]:
        chain_id = int(entry["chain_id"])
        chain_status = deployment["chains"].get(str(chain_id), {})
        all_pairs = entry["wrapped_depth_phase_pairs"] + entry["settlement_phase_pairs"]
        # Gate 1: hard-coded allowlist of chains with indexer code support.
        code_support = chain_id in UNIV2_CODE_SUPPORTED
        env_var_names = [item["name"] for item in build_env_vars(chain_id)]
        # Gate 2: all three env vars present and non-empty.
        env_present = all(env_values.get(name) for name in env_var_names)
        # Gate 3: at least one pair flagged live by the discovery artifact.
        discovered_live = any(row.get("live") for row in discovery_by_chain.get(str(chain_id), {}).get("pairsChecked", []))
        recorded_rows = chain_status.get("uniswapV2Pools", [])
        # Gate 4: a recorded pool whose base AND quote both appear in all_pairs
        # (matched independently, not as an exact pair tuple).
        recorded_live = any(
            row.get("base") in {pair.split("/")[0] for pair in all_pairs}
            and row.get("quote") in {pair.split("/")[1] for pair in all_pairs}
            for row in recorded_rows
        )
        blockers = []
        if not code_support:
            blockers.append(
                "token-aggregation dex-factories.ts does not yet expose CHAIN_<id>_UNISWAP_V2_* wiring for this chain."
            )
        if not env_present:
            blockers.append(
                "Uniswap V2 factory/router/start-block env values are not fully documented in smom-dbis-138/.env for this chain."
            )
        if not discovered_live:
            blockers.append("No live cW Uniswap V2-compatible pair is currently discoverable for this chain.")
        if not recorded_live:
            blockers.append("No live cW Uniswap V2-compatible pair is currently recorded in deployment-status.json for this chain.")
        # Carry forward upstream blockers, dropping the two that are restated
        # by the gate-specific blockers above.
        blockers.extend(
            blocker
            for blocker in entry.get("blockers", [])
            if "Uniswap V2 factory/router addresses are not documented in-repo" not in blocker
            and "New Uniswap V2 pools must be added to token-aggregation indexing and MCP/API visibility before promotion." not in blocker
        )
        promotion_ready = code_support and env_present and discovered_live and recorded_live
        if blockers:
            blocked_chain_count += 1
        pool_templates = []
        for pair in all_pairs:
            base, quote = pair.split("/")
            pool_templates.append(
                {
                    "base": base,
                    "quote": quote,
                    "poolAddress": "0x...",
                }
            )
        entries.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "tier": entry["tier"],
                "hub_stable": entry["hub_stable"],
                "code_support_status": "ready" if code_support else "blocked",
                "env_values_present": env_present,
                "exact_env_vars_to_fill": build_env_vars(chain_id),
                "required_registry_records": build_required_registry_records(
                    chain_id,
                    entry["hub_stable"],
                    entry["documented_cw_tokens"],
                    all_pairs,
                ),
                "pool_entry_templates": pool_templates,
                "existing_documented_cw_tokens": entry["documented_cw_tokens"],
                "existing_anchor_addresses": chain_status.get("anchorAddresses", {}),
                "promotion_gate": {
                    "factory_router_env_present": env_present,
                    "indexer_support_present": code_support,
                    "pool_registry_recorded": recorded_live,
                    # NOTE(review): visibility gate is proxied by env presence
                    # here rather than checked directly — confirm intended.
                    "mcp_or_api_visibility_added": env_present,
                    "promotion_ready": promotion_ready,
                },
                "blocking_items": blockers,
            }
        )
    payload = {
        "generated_at": now(),
        "program_name": promod["program_name"],
        "purpose": "Exact per-chain env vars and registry records required before any Mr. Promod Uniswap V2 pool can be promoted as live.",
        "mainnet_funding_posture": promod["mainnet_funding_posture"],
        "summary": {
            "chain_count": len(entries),
            "blocked_chain_count": blocked_chain_count,
            "note": "All target public chains remain blocked for live promotion until Uniswap V2 env wiring, registry entries, and pool/indexer visibility are complete.",
        },
        "entries": entries,
        "source_artifacts": [
            "reports/extraction/promod-uniswap-v2-liquidity-program-latest.json",
            "reports/extraction/promod-uniswap-v2-live-pair-discovery-latest.json",
            "cross-chain-pmm-lps/config/deployment-status.json",
            "smom-dbis-138/services/token-aggregation/src/config/dex-factories.ts",
        ],
    }
    write_json(REPORT, payload)
    lines = [
        "# Mr. Promod Uniswap V2 Promotion Gates",
        "",
        f"- Generated: `{payload['generated_at']}`",
        "- Purpose: exact per-chain env vars and registry records required before the first live Uniswap V2 pool can be promoted.",
        f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
        f"- Chains in scope: `{payload['summary']['chain_count']}`",
        f"- Currently blocked: `{payload['summary']['blocked_chain_count']}`",
        "",
        "## Global Rule",
        "",
        "- Promotion requires all of the following on the target chain: Uniswap V2 factory/router/start-block env set, indexer code support present, a live pair discoverable on-chain, and the pool address recorded in `deployment-status.json`.",
        "",
        "## Operator Table",
        "",
        "| Chain | Network | Code Support | Exact Env Vars To Fill | Registry Records To Fill |",
        "|---|---|---|---|---|",
    ]
    for entry in entries:
        env_vars = ", ".join(f"`{item['name']}`" for item in entry["exact_env_vars_to_fill"])
        # Table cell shows only the first three registry records per chain.
        registry = ", ".join(f"`{item['path']}`" for item in entry["required_registry_records"][:3])
        lines.append(
            f"| `{entry['chain_id']}` | {entry['network']} | `{entry['code_support_status']}` | {env_vars} | {registry} |"
        )
    lines.extend(
        [
            "",
            "## First Live Pool Minimum Checklist",
            "",
            "1. Add `CHAIN_<id>_UNISWAP_V2_FACTORY`, `CHAIN_<id>_UNISWAP_V2_ROUTER`, and `CHAIN_<id>_UNISWAP_V2_START_BLOCK` for the target chain.",
            "2. Extend token-aggregation code support for that chain if `code_support_status` is `blocked`.",
            "3. Create the pool on-chain and record its `base`, `quote`, and `poolAddress` in `cross-chain-pmm-lps/config/deployment-status.json`.",
            "4. Rebuild live-pair discovery and promotion-gate artifacts so the new pair is visible to operator tooling.",
            "5. Only then promote the pair as live in operator-facing docs or routing artifacts.",
        ]
    )
    write_text(DOC, "\n".join(lines))
    # Emit the report path so callers can chain on this script's output.
    print(REPORT)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
from pathlib import Path
import json, time
ROOT = Path(__file__).resolve().parents[2]
REPORTS = ROOT / "reports" / "extraction"
DOCS = ROOT / "docs" / "03-deployment"
CONFIG = ROOT / "config" / "extraction"
# One-line I/O helpers shared by this report generator.
# Parse a JSON document from disk.
def load(p): return json.loads(p.read_text())
# Write `data` as indented JSON with a trailing newline, creating parent dirs.
def write(p, data): p.parent.mkdir(parents=True, exist_ok=True); p.write_text(json.dumps(data, indent=2)+"\n")
# Write text with exactly one trailing newline, creating parent dirs.
def write_text(p, text): p.parent.mkdir(parents=True, exist_ok=True); p.write_text(text.rstrip()+"\n")
# Current UTC time as an ISO-8601 Z-suffixed string.
def now(): return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
# Flat script body: load inputs, assemble the execution-plan payload, then
# write the JSON report and the markdown operator doc.
review = load(REPORTS / "immediate-and-same-day-corridor-assets-latest.json")
# NOTE(review): `strict` is loaded but never referenced below — confirm it is
# still needed (its path only appears as a string in source_artifacts).
strict = load(REPORTS / "strict-operator-public-liquidity-table-latest.json")
inv = load(CONFIG / "additional-wallet-inventory.json")
policy = load(CONFIG / "source-to-cex-production-policy.json")
# Only off-chain accounts explicitly opted into the baseline count as sinks.
sinks = [a for a in inv.get("offchain_accounts", []) if a.get("include_in_baseline")]
payload = {
    "generated_at": now(),
    "strategy_frame": policy["default_route_model"],
    "operator_rule": "treat on-chain Mainnet cW/canonical pools as bounded conversion handshakes before external execution",
    "production_enabled": policy.get("production_enabled", False),
    "mainnet_funding_posture": {
        "mode": "dual-rail",
        "required_deployer_assets": ["cWUSDC", "cWUSDT"],
        "primary_normalization_rail": "cWUSDC -> USDC",
        "support_normalization_rail": "cWUSDT -> cWUSDC -> USDC",
        "preferred_final_settlement_asset": "USDC",
        "notes": [
            "The deployer wallet should maintain both Mainnet wrapped stables.",
            "This dual-rail funding posture supports both the primary USDC settlement path and the cUSDT support rail."
        ]
    },
    "source_bucket_totals_usd": review["bucket_subtotals_usd"],
    "source_bucket_counts": review["bucket_counts"],
    "production_sinks": sinks,
    # One row per source asset: bridge output, preferred normalization route,
    # aggregator decision set, CEX deposit asset, fallback, execution model.
    "entries": [
        {"source_asset":"cUSDC","source_location":"Chain 138","source_bucket":"same_day_corridor","bridge_output":"cWUSDC on Ethereum Mainnet","preferred_normalization":"cWUSDC -> USDC","aggregator_decision_set":["direct cWUSDC -> USDC","independent USDC sink if live","RFQ wrapped-to-USDC if available"],"preferred_cex_deposit_asset":"USDC","fallback_path":"bridge cUSDC -> cWUSDC, then use best available Mainnet USDC normalization route with capped public-pool usage","execution_model":"bridge first, normalize on Mainnet, then deposit USDC to CEX"},
        {"source_asset":"cUSDT","source_location":"Chain 138","source_bucket":"same_day_corridor","bridge_output":"cWUSDT on Ethereum Mainnet","preferred_normalization":"cWUSDT -> cWUSDC -> USDC","aggregator_decision_set":["direct cWUSDT -> USDT","cWUSDT -> cWUSDC -> USDC","RFQ wrapped-to-stable conversion if available"],"preferred_cex_deposit_asset":"USDC","fallback_path":"use direct cWUSDT -> USDT only for tiny packets or explicit override; otherwise route through USDC normalization","execution_model":"bridge first, compare direct USDT vs USDC-normalization path on Mainnet, then deposit the settlement stable to CEX"},
        {"source_asset":"LP:cUSDT/cUSDC","source_location":"Chain 138","source_bucket":"same_day_corridor","bridge_output":"cUSDT and cUSDC after LP withdrawal","preferred_normalization":"withdraw LP -> prefer cUSDC feeder -> bridge -> cWUSDC -> USDC","aggregator_decision_set":["post-withdrawal normalization","direct cWUSDC -> USDC","cWUSDT -> cWUSDC -> USDC if needed"],"preferred_cex_deposit_asset":"USDC","fallback_path":"withdraw LP, split outputs by best feeder path, avoid forcing whole ticket through direct USDT sink","execution_model":"LP unwind is feeder preparation, not terminal execution"},
        {"source_asset":"cWUSDC","source_location":"Ethereum Mainnet","source_bucket":"immediate","bridge_output":"none","preferred_normalization":"cWUSDC -> USDC","aggregator_decision_set":["direct cWUSDC -> USDC","alternative independent USDC sinks","RFQ wrapped-to-USDC conversion"],"preferred_cex_deposit_asset":"USDC","fallback_path":"split size, cap pool usage, treat on-chain conversion strictly as deposit preparation","execution_model":"same-hour handshake into USDC, then immediate CEX handoff"},
        {"source_asset":"cWUSDT","source_location":"Ethereum Mainnet","source_bucket":"immediate","bridge_output":"none","preferred_normalization":"cWUSDT -> cWUSDC -> USDC","aggregator_decision_set":["direct cWUSDT -> USDT","cWUSDT -> cWUSDC -> USDC","RFQ wrapped-to-stable conversion"],"preferred_cex_deposit_asset":"USDC","fallback_path":"direct USDT path is last-resort and tiny; practical size should flow through USDC-normalization","execution_model":"same-hour normalization decision on Mainnet, then CEX handoff"}
    ],
    "source_artifacts": [
        "reports/extraction/immediate-and-same-day-corridor-assets-latest.json",
        "reports/extraction/strict-operator-public-liquidity-table-latest.json",
        "config/extraction/additional-wallet-inventory.json",
        "config/extraction/source-to-cex-production-policy.json"
    ]
}
write(REPORTS / "source-to-cex-execution-plan-latest.json", payload)
# Render the markdown operator doc from the same payload.
lines = [
    "# Source To CEX Execution Plan","",
    f"- Generated: `{payload['generated_at']}`",
    f"- Strategy frame: {payload['strategy_frame']}",
    f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`",
    "","## Operator Table","","| Source Asset | Bridge Output | Preferred Normalization | CEX Deposit Asset | Fallback Path |","|---|---|---|---|---|"
]
for row in payload['entries']:
    lines.append(f"| `{row['source_asset']}` | {row['bridge_output']} | {row['preferred_normalization']} | `{row['preferred_cex_deposit_asset']}` | {row['fallback_path']} |")
lines += ["","## Notes","","- `cUSDC` is the cleanest same-day corridor feeder.","- `cUSDT` should usually normalize through USDC until direct USDT depth improves.","- Stable LP claims are feeder-preparation assets.","- Mainnet `cWUSDC` and `cWUSDT` are immediate in mechanics, but not deep enough to absorb large tickets on-chain."]
write_text(DOCS / "SOURCE_TO_CEX_EXECUTION_PLAN.md", "\n".join(lines))
# Emit the report path so callers can chain on this script's output.
print(REPORTS / "source-to-cex-execution-plan-latest.json")

View File

@@ -0,0 +1,233 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import json
import os
import sys
import time
from pathlib import Path
from typing import Any, Dict, List
ROOT = Path(__file__).resolve().parents[2]
CONFIG = ROOT / "config" / "extraction"
REPORTS = ROOT / "reports" / "extraction"
INVENTORY = CONFIG / "additional-wallet-inventory.json"
OUT_VALIDATION = REPORTS / "source-to-cex-offchain-sink-validation-latest.json"
SOURCE_TO_CEX_PLAN = REPORTS / "source-to-cex-execution-plan-latest.json"
REQUIRED_ENV = {
"label": "SOURCE_TO_CEX_SINK_LABEL",
"platform": "SOURCE_TO_CEX_SINK_PLATFORM",
"account_type": "SOURCE_TO_CEX_SINK_ACCOUNT_TYPE",
"preferred_deposit_asset": "SOURCE_TO_CEX_SINK_PREFERRED_DEPOSIT_ASSET",
"deposit_chain_id": "SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_ID",
"deposit_chain_name": "SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_NAME",
"deposit_address": "SOURCE_TO_CEX_SINK_DEPOSIT_ADDRESS",
}
def now() -> str:
    """Return the current UTC time formatted as ``YYYY-MM-DDTHH:MM:SSZ``."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def load_json(path: Path) -> Dict[str, Any]:
    """Deserialize and return the JSON document stored at ``path``."""
    return json.loads(path.read_text())
def write_json(path: Path, data: Dict[str, Any]) -> None:
    """Write ``data`` as indented JSON with a trailing newline, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(data, indent=2) + "\n")
def first_missing_env() -> List[str]:
    """Return the names of required sink env vars that are unset or empty,
    preserving the declaration order of ``REQUIRED_ENV``."""
    return [
        env_name
        for env_name in REQUIRED_ENV.values()
        if not os.environ.get(env_name)
    ]
def placeholder_like(text: str) -> bool:
    """True when ``text`` looks like placeholder data rather than a real value.

    Matches (case-insensitively, after trimming) the empty string, a few known
    placeholder words, or anything starting with ``example``.
    """
    normalized = text.strip().lower()
    if normalized.startswith("example"):
        return True
    return normalized in {"", "unknown", "example-exchange", "planned"}
def validate_inventory() -> Dict[str, Any]:
    """Validate baseline off-chain sink rows and write a validation report.

    Reads the wallet inventory (and the execution plan, if present), checks
    each baseline-included sink for placeholder/missing fields, and writes the
    resulting payload to OUT_VALIDATION. ``payload["ready"]`` is True only
    when no blocking issues were found.
    """
    inv = load_json(INVENTORY)
    plan = load_json(SOURCE_TO_CEX_PLAN) if SOURCE_TO_CEX_PLAN.exists() else {}
    # Only rows explicitly opted into the baseline are validated.
    included = [row for row in inv.get("offchain_accounts", []) if row.get("include_in_baseline")]
    sink_rows: List[Dict[str, Any]] = []
    blocking: List[str] = []
    warnings: List[str] = []
    for row in included:
        issues = []
        if placeholder_like(str(row.get("label", ""))):
            issues.append("label still looks like placeholder data")
        if placeholder_like(str(row.get("platform", ""))):
            issues.append("platform is missing or placeholder")
        if not row.get("enabled_for_production_handoff"):
            issues.append("enabled_for_production_handoff is false")
        if row.get("operational_status") != "enabled":
            issues.append("operational_status is not enabled")
        if not str(row.get("deposit_address", "")).strip():
            issues.append("deposit_address missing")
        if not row.get("preferred_deposit_asset"):
            issues.append("preferred_deposit_asset missing")
        if row.get("min_packet_usd") is None or row.get("max_packet_usd") is None:
            issues.append("packet bounds missing")
        # Inconsistent asset config is a warning, not a blocker.
        if row.get("preferred_deposit_asset") and row.get("accepted_deposit_assets"):
            if row["preferred_deposit_asset"] not in row["accepted_deposit_assets"]:
                warnings.append(
                    f"{row.get('label')}: preferred_deposit_asset is not listed in accepted_deposit_assets"
                )
        if issues:
            blocking.extend(f"{row.get('label')}: {item}" for item in issues)
        # Summarize the row without echoing the raw deposit address.
        sink_rows.append(
            {
                "label": row.get("label"),
                "platform": row.get("platform"),
                "account_type": row.get("account_type"),
                "operational_status": row.get("operational_status"),
                "enabled_for_production_handoff": bool(row.get("enabled_for_production_handoff")),
                "preferred_deposit_asset": row.get("preferred_deposit_asset"),
                "deposit_chain_id": str(row.get("deposit_chain_id", "")),
                "deposit_chain_name": row.get("deposit_chain_name"),
                "deposit_address_present": bool(str(row.get("deposit_address", "")).strip()),
                "min_packet_usd": row.get("min_packet_usd"),
                "max_packet_usd": row.get("max_packet_usd"),
                "accepted_deposit_assets": row.get("accepted_deposit_assets", []),
            }
        )
    if not included:
        blocking.append("no off-chain sink rows are currently included in baseline scope")
    payload = {
        "generated_at": now(),
        "inventory_path": str(INVENTORY.relative_to(ROOT)),
        "mainnet_funding_posture": plan.get("mainnet_funding_posture"),
        "included_sink_count": len(included),
        "ready": len(blocking) == 0,
        "blocking_issues": blocking,
        "warnings": warnings,
        "sinks": sink_rows,
    }
    write_json(OUT_VALIDATION, payload)
    return payload
def import_from_env(enable_production: bool) -> Dict[str, Any]:
    """Import an off-chain sink definition from SOURCE_TO_CEX_SINK_* env vars.

    Builds a sink row from the environment, upserts it (by label) into the
    wallet inventory, optionally flips ``production_enabled`` in the
    production policy, and returns a small summary payload.

    Raises:
        SystemExit: when any required env var is missing.
    """
    missing = first_missing_env()
    if missing:
        raise SystemExit(
            "Missing required env vars for sink import: " + ", ".join(missing)
        )
    inv = load_json(INVENTORY)
    row = {
        "label": os.environ["SOURCE_TO_CEX_SINK_LABEL"],
        "platform": os.environ["SOURCE_TO_CEX_SINK_PLATFORM"],
        "account_type": os.environ["SOURCE_TO_CEX_SINK_ACCOUNT_TYPE"],
        "operational_status": os.environ.get("SOURCE_TO_CEX_SINK_OPERATIONAL_STATUS", "enabled"),
        "enabled_for_production_handoff": os.environ.get("SOURCE_TO_CEX_SINK_ENABLE_HANDOFF", "1") not in {"0", "false", "False"},
        # Comma-separated list; defaults to just the preferred asset.
        "accepted_deposit_assets": [
            item.strip()
            for item in os.environ.get("SOURCE_TO_CEX_SINK_ACCEPTED_DEPOSIT_ASSETS", os.environ["SOURCE_TO_CEX_SINK_PREFERRED_DEPOSIT_ASSET"]).split(",")
            if item.strip()
        ],
        "preferred_deposit_asset": os.environ["SOURCE_TO_CEX_SINK_PREFERRED_DEPOSIT_ASSET"],
        "deposit_chain_id": os.environ["SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_ID"],
        "deposit_chain_name": os.environ["SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_NAME"],
        "deposit_address": os.environ["SOURCE_TO_CEX_SINK_DEPOSIT_ADDRESS"],
        "min_packet_usd": int(os.environ.get("SOURCE_TO_CEX_SINK_MIN_PACKET_USD", "1000")),
        "max_packet_usd": int(os.environ.get("SOURCE_TO_CEX_SINK_MAX_PACKET_USD", "250000")),
        "slippage_ceiling_bps": int(os.environ.get("SOURCE_TO_CEX_SINK_SLIPPAGE_CEILING_BPS", "100")),
        "asset_balances": [
            {
                "symbol": os.environ.get("SOURCE_TO_CEX_SINK_BALANCE_SYMBOL", os.environ["SOURCE_TO_CEX_SINK_PREFERRED_DEPOSIT_ASSET"]),
                "amount": int(os.environ.get("SOURCE_TO_CEX_SINK_BALANCE_AMOUNT", "0")),
                "estimated_usd": int(os.environ.get("SOURCE_TO_CEX_SINK_BALANCE_ESTIMATED_USD", "0")),
                "chain_id": "offchain",
                "chain_name": "Off-chain / custodial",
                "notes": "Imported from environment for production sink onboarding.",
            }
        ],
        "include_in_baseline": True,
        "notes": os.environ.get("SOURCE_TO_CEX_SINK_NOTES", "Imported from environment for source-to-CEX production handoff."),
    }
    # Upsert by label: replace an existing row in place, otherwise prepend.
    current = inv.get("offchain_accounts", [])
    replaced = False
    for idx, existing in enumerate(current):
        if existing.get("label") == row["label"]:
            current[idx] = row
            replaced = True
            break
    if not replaced:
        current.insert(0, row)
    inv["offchain_accounts"] = current
    write_json(INVENTORY, inv)
    if enable_production:
        # Persist the production flag in the policy file.
        policy_path = CONFIG / "source-to-cex-production-policy.json"
        policy = load_json(policy_path)
        policy["production_enabled"] = True
        write_json(policy_path, policy)
    return {
        "generated_at": now(),
        "inventory_path": str(INVENTORY.relative_to(ROOT)),
        "imported_sink_label": row["label"],
        "production_enabled_set": enable_production,
        "accepted_deposit_assets": row["accepted_deposit_assets"],
        "preferred_deposit_asset": row["preferred_deposit_asset"],
    }
def print_env_template() -> int:
    """Print a blank SOURCE_TO_CEX_SINK_* env template to stdout; returns 0."""
    template_lines = [
        "SOURCE_TO_CEX_SINK_LABEL=",
        "SOURCE_TO_CEX_SINK_PLATFORM=",
        "SOURCE_TO_CEX_SINK_ACCOUNT_TYPE=cex",
        "SOURCE_TO_CEX_SINK_PREFERRED_DEPOSIT_ASSET=USDC",
        "SOURCE_TO_CEX_SINK_ACCEPTED_DEPOSIT_ASSETS=USDC",
        "SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_ID=1",
        "SOURCE_TO_CEX_SINK_DEPOSIT_CHAIN_NAME=Ethereum Mainnet",
        "SOURCE_TO_CEX_SINK_DEPOSIT_ADDRESS=",
        "SOURCE_TO_CEX_SINK_OPERATIONAL_STATUS=enabled",
        "SOURCE_TO_CEX_SINK_ENABLE_HANDOFF=1",
        "SOURCE_TO_CEX_SINK_MIN_PACKET_USD=1000",
        "SOURCE_TO_CEX_SINK_MAX_PACKET_USD=250000",
        "SOURCE_TO_CEX_SINK_SLIPPAGE_CEILING_BPS=100",
        "SOURCE_TO_CEX_SINK_NOTES=",
    ]
    print("\n".join(template_lines))
    return 0
def main() -> int:
    """CLI entry point: ``validate`` | ``import-env [--enable-production]`` |
    ``print-env-template``. Returns a process exit code."""
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers(dest="cmd", required=True)
    sub.add_parser("validate")
    import_parser = sub.add_parser("import-env")
    import_parser.add_argument("--enable-production", action="store_true")
    sub.add_parser("print-env-template")
    args = parser.parse_args()
    if args.cmd == "validate":
        payload = validate_inventory()
        print(json.dumps(payload, indent=2))
        # Exit non-zero when validation found blocking issues.
        return 0 if payload["ready"] else 1
    if args.cmd == "import-env":
        payload = import_from_env(args.enable_production)
        print(json.dumps(payload, indent=2))
        return 0
    if args.cmd == "print-env-template":
        return print_env_template()
    # Unreachable with required=True subparsers; defensive fallback.
    return 1
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,79 @@
#!/usr/bin/env python3
from pathlib import Path
import json, time
ROOT = Path(__file__).resolve().parents[2]
CONFIG = ROOT / "config" / "extraction"
REPORTS = ROOT / "reports" / "extraction"
DOCS = ROOT / "docs" / "03-deployment"
VALIDATION = REPORTS / "source-to-cex-offchain-sink-validation-latest.json"
# One-line I/O helpers shared by this report generator.
# Parse a JSON document from disk.
def load(p): return json.loads(p.read_text())
# Write `data` as indented JSON with a trailing newline, creating parent dirs.
def write(p, data): p.parent.mkdir(parents=True, exist_ok=True); p.write_text(json.dumps(data, indent=2)+"\n")
# Write text with exactly one trailing newline, creating parent dirs.
def write_text(p, text): p.parent.mkdir(parents=True, exist_ok=True); p.write_text(text.rstrip()+"\n")
# Current UTC time as an ISO-8601 Z-suffixed string.
def now(): return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
# Flat script body: load inputs, re-check sink readiness, and write the
# production-readiness JSON report plus its markdown doc.
policy = load(CONFIG / "source-to-cex-production-policy.json")
inv = load(CONFIG / "additional-wallet-inventory.json")
plan = load(REPORTS / "source-to-cex-execution-plan-latest.json")
strict = load(REPORTS / "strict-operator-public-liquidity-table-latest.json")
# The standalone validation artifact is optional input.
validation = load(VALIDATION) if VALIDATION.exists() else None
sinks = []
issues = []
included_sink_count = 0
for a in inv.get('offchain_accounts', []):
    if not a.get('include_in_baseline'):
        continue
    included_sink_count += 1
    row = {
        'label': a.get('label'), 'platform': a.get('platform', 'unknown'), 'enabled_for_production_handoff': bool(a.get('enabled_for_production_handoff')),
        'deposit_address_present': bool(str(a.get('deposit_address','')).strip()), 'operational_status': a.get('operational_status','unknown'),
        'preferred_deposit_asset': a.get('preferred_deposit_asset')
    }
    sinks.append(row)
    # Any of these conditions blocks live production for the whole plan.
    if not row['enabled_for_production_handoff']: issues.append(f"{row['label']}: production handoff not enabled")
    if not row['deposit_address_present']: issues.append(f"{row['label']}: missing deposit address")
    if row['operational_status'] != 'enabled': issues.append(f"{row['label']}: operational_status is not enabled")
    if not row['preferred_deposit_asset']: issues.append(f"{row['label']}: missing preferred deposit asset")
if included_sink_count == 0:
    issues.append('no real off-chain sink is currently included in baseline scope')
# Depth evidence per route id from the strict public-liquidity table.
direct = {r['route_id']: r['current_verified_public_depth_usd'] for r in strict.get('entries', [])}
payload = {
    'generated_at': now(),
    'ready_for_immediate_live_production': bool(policy.get('production_enabled')) and not issues and bool(sinks),
    'strategy_frame': plan['strategy_frame'],
    'mainnet_funding_posture': plan.get('mainnet_funding_posture'),
    'policy': {'path':'config/extraction/source-to-cex-production-policy.json','production_enabled': bool(policy.get('production_enabled')),'packetization_policy': policy.get('packetization_policy',{})},
    'offchain_sink_validation': validation,
    'offchain_sinks': sinks,
    'tasks': [
        {'task':'operator_model_locked','status':'ready'},
        {'task':'baseline_refreshed','status':'ready'},
        {'task':'mainnet_normalization_modeled','status':'ready','evidence':{'cwusdc_direct_depth_usd':direct.get('cwusdc-direct-usdc'),'cwusdt_direct_depth_usd':direct.get('cwusdt-direct-usdt')}},
        {'task':'production_policy_present','status':'ready'},
        {'task':'offchain_sink_defined','status':'blocked' if issues or not sinks else 'ready','blocking_issues':issues},
        {'task':'live_production_enabled','status':'blocked' if (issues or not sinks or not policy.get('production_enabled')) else 'ready'}
    ],
    'blocking_issues': issues + ([] if policy.get('production_enabled') else ['source-to-cex production policy remains disabled']),
    'next_live_inputs_required': [
        'at least one real off-chain sink row with include_in_baseline=true',
        'enabled_for_production_handoff=true on the real sink',
        'real deposit address and chain for the chosen sink',
        'operational_status=enabled for the chosen sink',
        'operator decision to set production_enabled=true after live canaries succeed'
    ]
}
write(REPORTS / 'source-to-cex-production-readiness-latest.json', payload)
# Render the markdown companion doc from the same payload.
lines = ['# Source To CEX Production Readiness','',f"- Generated: `{payload['generated_at']}`",f"- Ready for immediate live production: `{payload['ready_for_immediate_live_production']}`",f"- Policy production enabled: `{payload['policy']['production_enabled']}`"]
if payload.get('mainnet_funding_posture'):
    lines.append(f"- Mainnet funding posture: `{payload['mainnet_funding_posture']['mode']}` via `{', '.join(payload['mainnet_funding_posture']['required_deployer_assets'])}`")
lines += ['','## Blocking Issues','']
for issue in payload['blocking_issues'] or ['none']:
    lines.append(f"- {issue}")
if validation:
    lines += ['','## Off-Chain Sink Validation','',f"- Validation ready: `{validation['ready']}`",f"- Included sink count: `{validation['included_sink_count']}`"]
    for warning in validation.get('warnings', []):
        lines.append(f"- Warning: {warning}")
lines += ['','## Next Live Inputs Required','']
for item in payload['next_live_inputs_required']:
    lines.append(f'- {item}')
write_text(DOCS / 'SOURCE_TO_CEX_PRODUCTION_READINESS.md', '\n'.join(lines))
# Emit the report path so callers can chain on this script's output.
print(REPORTS / 'source-to-cex-production-readiness-latest.json')

View File

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""
Validate config/pmm-soak-wallet-grid.json: dimensions, linearIndex formula,
addresses, cellId, label vs networkCode/asn.
Exit 0 if OK; non-zero with message on stderr.
"""
from __future__ import annotations
import json
import re
import sys
from pathlib import Path
_REPO_LIB = Path(__file__).resolve().parent
if str(_REPO_LIB) not in sys.path:
sys.path.insert(0, str(_REPO_LIB))
from elemental_imperium_wallet_common import build_label, cell_id, linear_index # noqa: E402
# EVM address shape: "0x" + exactly 40 hex digits (case-insensitive).
ADDR = re.compile(r"^0x[0-9a-fA-F]{40}$")
# Cap on reported errors so a badly broken grid does not flood stderr.
MAX_ERRORS = 80
def main() -> int:
    """Validate config/pmm-soak-wallet-grid.json against the canonical layout.

    Checks: wallet count equals the dimensions product, per-wallet coordinate
    ranges, linearIndex/cellId derived via the shared helpers, address shape,
    and label consistency with networkCode/asn.

    Returns:
        0 when the grid is fully consistent, 1 otherwise (errors on stderr).
    """
    repo_root = Path(__file__).resolve().parents[2]
    grid_path = repo_root / "config" / "pmm-soak-wallet-grid.json"
    if not grid_path.is_file():
        print("Missing config/pmm-soak-wallet-grid.json", file=sys.stderr)
        return 1
    data = json.loads(grid_path.read_text(encoding="utf-8"))
    # Dimension defaults match the canonical 33x33x6 Elemental Imperium matrix.
    dim = data.get("dimensions") or {}
    lp_n = int(dim.get("lpbcaCount", 33))
    br_n = int(dim.get("branchCount", 33))
    cl_n = int(dim.get("classCount", 6))
    expected_count = lp_n * br_n * cl_n
    wallets = data.get("wallets")
    if not isinstance(wallets, list):
        print("Invalid: wallets must be an array", file=sys.stderr)
        return 1
    errs: list[str] = []
    if len(wallets) != expected_count:
        errs.append(f"wallet count {len(wallets)} != dimensions product {expected_count}")
    for i, w in enumerate(wallets):
        # Stop scanning once the error cap is reached; one marker row is added.
        if len(errs) >= MAX_ERRORS:
            errs.append("… (further errors suppressed)")
            break
        try:
            lpbca = int(w["lpbca"])
            branch = int(w["branch"])
            class_ = int(w["class"])
            li = int(w["linearIndex"])
        except (KeyError, TypeError, ValueError) as e:
            errs.append(f"wallet {i}: bad coordinates or linearIndex ({e})")
            continue
        # NOTE(review): an out-of-range coordinate is reported but validation of
        # the derived fields still proceeds with those coordinates — confirm the
        # shared helpers tolerate out-of-range inputs.
        if not (0 <= lpbca < lp_n and 0 <= branch < br_n and 0 <= class_ < cl_n):
            errs.append(f"wallet {i}: coordinate out of range")
        exp_li = linear_index(lpbca, branch, class_)
        if li != exp_li:
            errs.append(f"wallet {i}: linearIndex {li} != expected {exp_li} (L{lpbca} B{branch} C{class_})")
        exp_cid = cell_id(lpbca, branch, class_)
        if w.get("cellId") != exp_cid:
            errs.append(f"wallet {i}: cellId {w.get('cellId')!r} != {exp_cid!r}")
        addr = w.get("address", "")
        if not isinstance(addr, str) or not ADDR.match(addr):
            errs.append(f"wallet {i}: invalid address {addr!r}")
        # networkCode/asn are optional; when present they must be well-formed.
        nc = w.get("networkCode")
        if nc is not None and (not isinstance(nc, str) or not nc.strip()):
            errs.append(f"wallet {i}: invalid networkCode")
        asn = w.get("asn")
        # Exact type check (not isinstance) so bool is rejected as an ASN.
        if asn is not None and type(asn) is not int:
            errs.append(f"wallet {i}: asn must be int or null")
        # Label is only enforced when a networkCode is present.
        if nc is not None:
            exp_label = build_label(str(nc), exp_cid, asn if isinstance(asn, int) else None)
            if w.get("label") != exp_label:
                errs.append(
                    f"wallet {i}: label {w.get('label')!r} != expected {exp_label!r}"
                )
    if errs:
        print("Elemental Imperium wallet grid validation failed:", file=sys.stderr)
        for e in errs:
            print(f"  {e}", file=sys.stderr)
        return 1
    print(f"OK: {len(wallets)} wallets, linearIndex/cellId/labels consistent")
    return 0
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,138 @@
#!/usr/bin/env bash
# Monitor the public mainnet cwUSDC/USDC Uniswap V2 pair and, when the policy
# checker recommends intervention, run the managed Aave quote-push cycle
# against the defended DODO venue. Simulation by default; --broadcast executes.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
POLICY_PATH="${PROJECT_ROOT}/config/extraction/mainnet-cwusdc-usdc-support-policy.json"
CHECKER="${PROJECT_ROOT}/scripts/verify/check-mainnet-cwusdc-usdc-support-health.py"
FORGE_TARGET="smom-dbis-138/script/flash/RunManagedMainnetAaveCwusdcUsdcQuotePushCycle.s.sol:RunManagedMainnetAaveCwusdcUsdcQuotePushCycle"
DRY_RUN=1
HEALTH_ONLY=0
FORCE_RUN=0
FLASH_OVERRIDE=""
usage() {
  cat <<'EOF'
Usage: bash scripts/maintenance/run-mainnet-cwusdc-usdc-support.sh [options]
Options:
--health-only Print the current support decision and exit.
--broadcast Broadcast the managed cycle instead of simulating it.
--force Run the managed cycle even if the checker only recommends monitor/hold.
--flash-quote-amount-raw N
Override the policy-selected flash quote amount.
-h, --help Show this help text.
Behavior:
- Reads the canonical policy at config/extraction/mainnet-cwusdc-usdc-support-policy.json
- Checks the public Uniswap V2 pair and defended DODO venue
- Uses the managed Aave quote-push cycle against the defended DODO venue when intervention is recommended
- Defaults to simulation unless --broadcast is passed
EOF
}
while [[ $# -gt 0 ]]; do
  case "$1" in
    --health-only)
      HEALTH_ONLY=1
      shift
      ;;
    --broadcast)
      DRY_RUN=0
      shift
      ;;
    --force)
      FORCE_RUN=1
      shift
      ;;
    --flash-quote-amount-raw)
      FLASH_OVERRIDE="${2:-}"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "ERROR: unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
[[ -f "$POLICY_PATH" ]] || { echo "ERROR: missing policy file: $POLICY_PATH" >&2; exit 1; }
[[ -f "$CHECKER" ]] || { echo "ERROR: missing checker script: $CHECKER" >&2; exit 1; }
# Capture the checker output before eval: under `set -e`, a failure inside
# `eval "$(...)"` is silently discarded and would only surface later as an
# unbound-variable error, so fail fast here instead.
CHECKER_SHELL_VARS="$(python3 "$CHECKER" --shell)" || {
  echo "ERROR: health checker failed: $CHECKER" >&2
  exit 1
}
eval "$CHECKER_SHELL_VARS"
echo "Policy: $POLICY_PATH"
echo "Public pair: ${PUBLIC_PAIR_ADDRESS}"
echo "Defended DODO pool: ${DEFENDED_POOL_ADDRESS}"
echo "Deviation: ${PUBLIC_PAIR_DEVIATION_BPS:-unknown} bps"
echo "Decision: ${DECISION_SEVERITY} / ${DECISION_ACTION}"
echo "Reasons: ${REASONS_JSON}"
if [[ "$HEALTH_ONLY" -eq 1 ]]; then
  exit 0
fi
if [[ "$DECISION_ACTION" != "run_managed_cycle" && "$FORCE_RUN" -ne 1 ]]; then
  echo "No managed cycle scheduled. Use --force to simulate anyway."
  exit 0
fi
# Default-expand both sides so `set -u` cannot trip if the checker ever stops
# emitting FLASH_QUOTE_AMOUNT_RAW; the guard below handles the empty case.
FLASH_QUOTE_AMOUNT_RAW="${FLASH_OVERRIDE:-${FLASH_QUOTE_AMOUNT_RAW:-}}"
if [[ -z "${FLASH_QUOTE_AMOUNT_RAW:-}" || "${FLASH_QUOTE_AMOUNT_RAW}" == "0" ]]; then
  echo "ERROR: no flash quote amount resolved from policy or override" >&2
  exit 1
fi
# Every cycle input must come from the project env; fail fast with a clear message.
for required_var in \
  ETHEREUM_MAINNET_RPC \
  DODO_PMM_INTEGRATION_MAINNET \
  QUOTE_PUSH_EXTERNAL_UNWINDER_MAINNET \
  AAVE_QUOTE_PUSH_RECEIVER_MAINNET \
  QUOTE_PUSH_TREASURY_MANAGER_MAINNET \
  UNWIND_MODE; do
  if [[ -z "${!required_var:-}" ]]; then
    echo "ERROR: missing required env var: ${required_var}" >&2
    exit 1
  fi
done
# Prefer the dedicated keeper key; fall back to a generic PRIVATE_KEY.
export PRIVATE_KEY="${KEEPER_PRIVATE_KEY:-${PRIVATE_KEY:-}}"
if [[ -z "${PRIVATE_KEY}" ]]; then
  echo "ERROR: missing PRIVATE_KEY and KEEPER_PRIVATE_KEY" >&2
  exit 1
fi
export POOL_CWUSDC_USDC_MAINNET="${DEFENDED_POOL_ADDRESS}"
export FLASH_QUOTE_AMOUNT_RAW
export QUOTE_PUSH_TREASURY_HARVEST="${QUOTE_PUSH_TREASURY_HARVEST:-1}"
export QUOTE_PUSH_TREASURY_GAS_HOLDBACK_TARGET_RAW="${GAS_HOLDBACK_TARGET_RAW}"
forge_cmd=(
  forge script
  "$FORGE_TARGET"
  --root "$PROJECT_ROOT/smom-dbis-138"
  --rpc-url "$ETHEREUM_MAINNET_RPC"
)
if [[ "$DRY_RUN" -eq 0 ]]; then
  forge_cmd+=(--broadcast)
else
  echo "Simulation only. Pass --broadcast to execute on-chain."
fi
# Log the exact command with shell quoting so it can be replayed by hand.
printf 'Running:'
printf ' %q' "${forge_cmd[@]}"
printf '\n'
"${forge_cmd[@]}"

View File

@@ -156,6 +156,21 @@ else
fi
fi
# Elemental Imperium / PMM soak wallet matrix (optional; python3 required for full check)
if [[ -f "$PROJECT_ROOT/config/pmm-soak-wallet-grid.json" ]]; then
  if ! command -v python3 &>/dev/null; then
    # Grid exists but we cannot run the validator — degrade to a warning.
    log_warn "pmm-soak-wallet-grid.json present but python3 missing; skipping EI grid validation"
  else
    log_info "Elemental Imperium wallet grid (pmm-soak-wallet-grid.json)..."
    if python3 "$PROJECT_ROOT/scripts/lib/validate_elemental_imperium_wallet_grid.py"; then
      log_ok "pmm-soak-wallet-grid.json: structure and label consistency"
    else
      log_err "pmm-soak-wallet-grid.json: validation failed"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
if [[ -n "$OPTIONAL_ENV" ]]; then
for v in $OPTIONAL_ENV; do
check_env "$v" || true

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Apply Elemental Imperium (33×33×6) wallet labels to config/pmm-soak-wallet-grid.json.
# Optional flags: --network-code CODE | --asn N (see scripts/lib/apply_elemental_imperium_wallet_labels.py)
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/apply_elemental_imperium_wallet_labels.py" "$@"

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Rebuild the chain-138 → WEMIX-1111 bridge completion checklist artifacts.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
python3 "$REPO_ROOT/scripts/lib/chain_138_to_wemix_1111_bridge_completion_checklist.py"
echo "Built:"
echo " docs/03-deployment/CHAIN_138_TO_WEMIX_1111_BRIDGE_COMPLETION_CHECKLIST.md"
echo " reports/extraction/chain-138-to-wemix-1111-bridge-completion-checklist-latest.json"

View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Assemble the EIP-155 chain-138 registry PR package under
# docs/04-configuration/pr-ready/eip155-138-package/ from the pr-ready sources.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
SRC_DIR="$REPO_ROOT/docs/04-configuration/pr-ready"
PKG_DIR="$SRC_DIR/eip155-138-package"
mkdir -p "$PKG_DIR"
cp "$SRC_DIR/eip155-138.json" "$PKG_DIR/eip155-138.chainlist.json"
cp "$SRC_DIR/trust-wallet-registry-chain138.json" "$PKG_DIR/trust-wallet-registry-chain138.json"
# Syntax-check the package JSON when jq is available (best-effort otherwise).
# NOTE(review): manifest.json is validated below but never copied or created by
# this script — presumably maintained by hand in the package dir; under set -e
# this fails if it is absent. Confirm that is the intended behavior.
if command -v jq >/dev/null 2>&1; then
  jq empty "$PKG_DIR/eip155-138.chainlist.json" >/dev/null
  jq empty "$PKG_DIR/trust-wallet-registry-chain138.json" >/dev/null
  jq empty "$PKG_DIR/manifest.json" >/dev/null
fi
echo "Wrote package to $PKG_DIR"

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Rebuild the comprehensive capital baseline; plan + readiness inputs refresh first.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
python3 "$REPO_ROOT/scripts/lib/source_to_cex_execution_plan.py" >/dev/null
python3 "$REPO_ROOT/scripts/lib/source_to_cex_production_readiness.py" >/dev/null
exec python3 "$REPO_ROOT/scripts/lib/comprehensive_capital_baseline.py"

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Build output/cw-assets-usd-quote-dump.json — on-chain PMM quotes for cW* vs USD-like legs.
# RPC URLs are auto-exported from smom-dbis-138/.env when that file exists.
set -euo pipefail
ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
cd "$ROOT"
ENV_FILE="$ROOT/smom-dbis-138/.env"
if [[ -f "$ENV_FILE" ]]; then
  set -a # export every variable assigned while sourcing
  # shellcheck disable=SC1091
  source "$ENV_FILE"
  set +a
fi
OUT="${1:-$ROOT/output/cw-assets-usd-quote-dump.json}"
exec python3 "$ROOT/scripts/lib/dump_cw_usd_quotes.py" --output "$OUT"

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Regenerate the ProMod GRU v2 full-mesh gap report (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_gru_v2_full_mesh_gap_report.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Regenerate the ProMod Uniswap V2 first-deployment matrix (run from repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_first_deployment_matrix.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Regenerate the ProMod Uniswap V2 liquidity program report (run from repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_liquidity_program.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Run ProMod Uniswap V2 live pair discovery; all flags pass through untouched.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_live_pair_discovery.py "$@"

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Regenerate the ProMod Uniswap V2 phase-order report (run from repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase_order.py

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 bridge 3x matrix report (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_bridge_3x_matrix.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 bridge 3x operator packet (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_bridge_3x_operator_packet.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 bridge-from-138 matrix (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_bridge_from_138_matrix.py"

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Build the Phase 1 bridge-or-mint runbook (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase1_bridge_or_mint_runbook.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the Phase 1 execution checklist (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase1_execution_checklist.py

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 funding actions report (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_funding_actions.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 funding bundle (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_funding_bundle.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 1 funding readiness report (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/promod_uniswap_v2_phase1_funding_readiness.py"

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Build the Phase 1 remaining-8 paste pack (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase1_remaining_8_paste_pack.py

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Build the Phase 1 remaining-8 shell paste pack (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase1_remaining_8_shell_paste_pack.py

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Build the Phase 2 operator paste pack; the sequence artifacts refresh first.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
python3 scripts/lib/promod_uniswap_v2_phase2_operator_sequence.py >/dev/null
exec python3 scripts/lib/promod_uniswap_v2_phase2_operator_paste_pack.py

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Build the Phase 2 operator sequence (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase2_operator_sequence.py

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Build the Phase 2 shell paste pack; the sequence artifacts refresh first.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
python3 scripts/lib/promod_uniswap_v2_phase2_operator_sequence.py >/dev/null
exec python3 scripts/lib/promod_uniswap_v2_phase2_shell_paste_pack.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the Phase 2 wave-1 completion status report (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase2_wave1_completion_status.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the Phase 2 wave-2 operator paste pack (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase2_wave2_operator_paste_pack.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the Phase 2 wave-2 operator sequence (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase2_wave2_operator_sequence.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the Phase 2 wave-2 shell paste pack (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_phase2_wave2_shell_paste_pack.py

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Build the ProMod Uniswap V2 promotion gates report (run from the repo root).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
exec python3 scripts/lib/promod_uniswap_v2_promotion_gates.py

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Build the source→CEX execution plan (cwd intentionally left unchanged).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/source_to_cex_execution_plan.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Refresh the source→CEX production readiness report. Sink validation runs
# first but is best-effort: the readiness report must build even when it fails.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
python3 "$REPO_ROOT/scripts/lib/source_to_cex_offchain_sink_tool.py" validate >/dev/null || true
exec python3 "$REPO_ROOT/scripts/lib/source_to_cex_production_readiness.py"

View File

@@ -0,0 +1,327 @@
#!/usr/bin/env python3
from __future__ import annotations
from decimal import Decimal, getcontext
from pathlib import Path
import argparse
import json
import os
import re
import shlex
import subprocess
import sys
getcontext().prec = 42  # enough precision for uint256-scale reserve arithmetic
# Repo root, derived from this file's location two directories down.
ROOT = Path(__file__).resolve().parents[2]
POLICY_PATH = ROOT / "config" / "extraction" / "mainnet-cwusdc-usdc-support-policy.json"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
ROOT_ENV_PATH = ROOT / ".env"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# Standalone decimal integers; word boundaries keep digits embedded in hex
# tokens (e.g. 0xabc123) from matching.
UINT_RE = re.compile(r"\b\d+\b")
def load_json(path: Path) -> dict:
    """Read and parse a JSON document from disk."""
    raw = path.read_text()
    return json.loads(raw)
def load_env_file(path: Path) -> dict[str, str]:
    """Parse KEY=VALUE pairs from a dotenv-style file.

    Blank lines, comment lines, and lines without '=' are skipped; keys are
    stripped and surrounding single/double quotes are removed from values.
    A missing file yields an empty mapping.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, rhs = entry.partition("=")
        parsed[name.strip()] = rhs.strip().strip('"').strip("'")
    return parsed
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
if seen is None:
seen = set()
if key in seen:
return env_values.get(key, "")
seen.add(key)
value = os.environ.get(key) or env_values.get(key, "")
if value.startswith("${") and value.endswith("}"):
inner = value[2:-1]
target = inner.split(":-", 1)[0]
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
resolved = resolve_env_value(target, env_values, seen)
return resolved or fallback
return value
def merged_env_values() -> dict[str, str]:
    """Merge the repo-root .env with smom-dbis-138/.env (the latter wins)."""
    merged: dict[str, str] = {}
    for env_path in (ROOT_ENV_PATH, SMOM_ENV_PATH):
        merged.update(load_env_file(env_path))
    return merged
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run `cast call` against the RPC endpoint and return stripped stdout.

    Raises subprocess.CalledProcessError on a non-zero cast exit status.
    """
    command = ["cast", "call", target, signature]
    command.extend(args)
    command += ["--rpc-url", rpc_url]
    return subprocess.check_output(command, text=True).strip()
def parse_uint(value: str) -> int:
    """Extract the first standalone decimal integer from cast output.

    Raises ValueError when the text contains no standalone integer.
    """
    found = re.findall(r"\b\d+\b", value)
    if not found:
        raise ValueError(f"could not parse integer from {value!r}")
    return int(found[0])
def parse_uints(value: str, count: int) -> list[int]:
    """Extract the first `count` standalone decimal integers from cast output.

    Raises ValueError when fewer than `count` integers are present.
    """
    found = [int(tok) for tok in re.findall(r"\b\d+\b", value)]
    if len(found) < count:
        raise ValueError(f"expected at least {count} integers, got {found!r}")
    return found[:count]
def parse_address(value: str) -> str:
    """Return the first 0x-prefixed 20-byte hex address found in `value`.

    Raises ValueError when no address-shaped token is present.
    """
    hit = re.search(r"0x[a-fA-F0-9]{40}", value)
    if hit is None:
        raise ValueError(f"could not parse address from {value!r}")
    return hit.group(0)
def decimal_ratio(numerator: Decimal, denominator: Decimal) -> Decimal:
    """numerator/denominator, returning 0 for a zero denominator instead of raising."""
    return Decimal(0) if denominator == 0 else numerator / denominator
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Convert a raw fixed-point integer amount into whole token units."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
def compute_deviation_bps(price: Decimal, target: Decimal = Decimal("1")) -> Decimal:
    """Absolute deviation of `price` from `target` in basis points (0 if target is 0)."""
    if target == 0:
        return Decimal(0)
    relative = abs((price - target) / target)
    return relative * Decimal(10000)
def resolve_rpc_url(policy: dict, env_values: dict[str, str]) -> str:
    """Return the first configured RPC env key that resolves to a non-empty URL.

    Raises RuntimeError when none of policy["network"]["rpcEnvKeys"] resolve.
    """
    for env_key in policy["network"].get("rpcEnvKeys", []):
        candidate = resolve_env_value(env_key, env_values)
        if candidate:
            return candidate.rstrip("\r\n")
    raise RuntimeError("missing mainnet RPC URL")
def load_public_pair_from_policy(policy: dict, deployment_status: dict) -> dict:
    """Build the public-pair config: policy defaults overlaid with the first
    matching base/quote row from deployment-status for the policy's chain."""
    pair = dict(policy["publicPair"])
    chain_info = deployment_status["chains"][str(policy["network"]["chainId"])]
    match = next(
        (
            row
            for row in chain_info.get("uniswapV2Pools", [])
            if row.get("base") == pair["base"] and row.get("quote") == pair["quote"]
        ),
        None,
    )
    if match is not None:
        pair["poolAddress"] = match.get("poolAddress", pair["poolAddress"])
        pair["factoryAddress"] = match.get("factoryAddress")
        pair["routerAddress"] = match.get("routerAddress")
    return pair
def query_uniswap_pair_health(rpc_url: str, pair: dict) -> dict:
    """Read live reserves and price for a Uniswap V2 pair via `cast call`.

    Requires pair["poolAddress"], pair["baseAddress"], pair["quoteAddress"].
    Returns {"live": False, ...} when no pool address is configured; otherwise
    raw and unit-normalized reserves, the quote-per-base price, and its
    absolute deviation from 1.0 in basis points (stable-peg assumption).
    Raises on RPC failure or when the pair's tokens do not match the base.
    """
    pair_address = pair["poolAddress"]
    # A zero/empty pool address means the public pair is not deployed yet.
    if not pair_address or pair_address.lower() == ZERO_ADDRESS:
        return {"live": False, "poolAddress": pair_address}
    token0 = parse_address(cast_call(rpc_url, pair_address, "token0()(address)"))
    token1 = parse_address(cast_call(rpc_url, pair_address, "token1()(address)"))
    reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3)
    decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
    decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
    base_addr = pair.get("baseAddress")
    quote_addr = pair.get("quoteAddress")
    if not base_addr or not quote_addr:
        raise RuntimeError("pair token addresses not supplied")
    # Orient reserves so (base, quote) order is stable regardless of token0/1.
    # NOTE(review): only the base address is matched; quote_addr is required
    # but never cross-checked against the other token — confirm intended.
    if token0.lower() == base_addr.lower():
        base_raw, quote_raw = reserve0_raw, reserve1_raw
        base_decimals, quote_decimals = decimals0, decimals1
    elif token1.lower() == base_addr.lower():
        base_raw, quote_raw = reserve1_raw, reserve0_raw
        base_decimals, quote_decimals = decimals1, decimals0
    else:
        raise RuntimeError("pair tokens do not match configured base/quote")
    base_units = normalize_units(base_raw, base_decimals)
    quote_units = normalize_units(quote_raw, quote_decimals)
    price = decimal_ratio(quote_units, base_units)
    deviation_bps = compute_deviation_bps(price)
    # All Decimal values are serialized as strings to keep JSON exact.
    return {
        "live": True,
        "poolAddress": pair_address,
        "token0": token0,
        "token1": token1,
        "baseReserveRaw": str(base_raw),
        "quoteReserveRaw": str(quote_raw),
        "baseReserveUnits": str(base_units),
        "quoteReserveUnits": str(quote_units),
        "priceQuotePerBase": str(price),
        "deviationBps": str(deviation_bps),
    }
def query_dodo_health(rpc_url: str, defended_venue: dict) -> dict:
    """Read mid-price and vault reserves from the defended DODO pool.

    Unlike the Uniswap probe, RPC/parsing failures are captured and reported
    as {"live": False, "error": ...} so the caller can degrade gracefully.
    The mid price is normalized at 18 decimals and compared to the 1.0 peg.
    """
    pool_address = defended_venue["poolAddress"]
    try:
        mid_price_raw = parse_uint(cast_call(rpc_url, pool_address, "getMidPrice()(uint256)"))
        base_reserve_raw, quote_reserve_raw = parse_uints(
            cast_call(rpc_url, pool_address, "getVaultReserve()(uint256,uint256)"), 2
        )
    except Exception as exc:
        # Deliberately broad: any cast/subprocess/parse error marks the venue down.
        return {
            "live": False,
            "poolAddress": pool_address,
            "error": str(exc),
        }
    mid_price = normalize_units(mid_price_raw, 18)
    deviation_bps = compute_deviation_bps(mid_price)
    return {
        "live": True,
        "poolAddress": pool_address,
        "midPrice": str(mid_price),
        "deviationBps": str(deviation_bps),
        "baseReserveRaw": str(base_reserve_raw),
        "quoteReserveRaw": str(quote_reserve_raw),
    }
def choose_flash_amount(policy: dict, deviation_bps: Decimal) -> int:
    """Return the flash quote size for the first tier whose floor the deviation
    meets, or 0 when no tier matches.

    NOTE(review): assumes the policy lists tiers highest-floor first; an
    ascending list would always select the smallest tier — confirm ordering.
    """
    tiers = policy["managedCycle"]["quoteAmountByDeviationBps"]
    for tier in tiers:
        if deviation_bps >= Decimal(tier["minDeviationBps"]):
            return int(tier["flashQuoteAmountRaw"])
    return 0
def build_decision(policy: dict, public_health: dict, defended_health: dict) -> dict:
    """Combine pair/venue health with policy thresholds into one decision.

    Severity escalates ok -> warn -> thin -> intervene -> critical -> blocked;
    later checks intentionally override earlier ones, so statement order
    matters. Returns severity, action, the selected flash amount (0 unless the
    action is run_managed_cycle), the gas holdback target, and reason strings.
    """
    thresholds = policy["thresholds"]
    reasons: list[str] = []
    severity = "ok"
    action = "hold"
    if not public_health.get("live"):
        # No readable public pair: nothing to measure, manual recovery needed.
        severity = "critical"
        action = "manual_recover_public_pair"
        reasons.append("public pair missing or unreadable")
        deviation_bps = Decimal(0)
    else:
        deviation_bps = Decimal(public_health["deviationBps"])
        base_units = Decimal(public_health["baseReserveUnits"])
        quote_units = Decimal(public_health["quoteReserveUnits"])
        # Depth floor check runs first; deviation checks below may override it.
        thin_base = base_units < Decimal(thresholds["minBaseReserveUnits"])
        thin_quote = quote_units < Decimal(thresholds["minQuoteReserveUnits"])
        if thin_base or thin_quote:
            severity = "thin"
            action = "manual_reseed_public_lane"
            reasons.append("public pair reserve depth below policy floor")
        if deviation_bps >= Decimal(thresholds["criticalDeviationBps"]):
            severity = "critical"
            action = "run_managed_cycle"
            reasons.append("public pair outside critical deviation corridor")
        elif deviation_bps >= Decimal(thresholds["interveneDeviationBps"]):
            severity = "intervene"
            action = "run_managed_cycle"
            reasons.append("public pair outside intervention corridor")
        elif deviation_bps >= Decimal(thresholds["warnDeviationBps"]):
            # Warn only upgrades an otherwise-ok state; "thin" is preserved.
            severity = "warn" if severity == "ok" else severity
            if action == "hold":
                action = "monitor"
            reasons.append("public pair outside warn corridor")
    # A managed cycle needs the defended DODO venue to be reachable.
    if action == "run_managed_cycle" and not defended_health.get("live"):
        severity = "blocked"
        action = "manual_fix_defended_venue"
        reasons.append("defended DODO venue unavailable")
    flash_amount = choose_flash_amount(policy, deviation_bps) if action == "run_managed_cycle" else 0
    # A cycle without a sized flash amount is meaningless; downgrade to monitor.
    if action == "run_managed_cycle" and flash_amount == 0:
        severity = "warn"
        action = "monitor"
        reasons.append("deviation does not map to a configured flash amount")
    return {
        "severity": severity,
        "action": action,
        "flashQuoteAmountRaw": flash_amount,
        "gasHoldbackTargetRaw": int(policy["managedCycle"]["defaultGasHoldbackTargetRaw"]),
        "reasons": reasons,
    }
def render_shell(result: dict) -> str:
    """Render the decision as eval-able, shell-quoted KEY=VALUE lines."""
    decision = result["decision"]
    pairs = (
        ("PUBLIC_PAIR_ADDRESS", result["publicPair"]["poolAddress"]),
        ("DEFENDED_POOL_ADDRESS", result["defendedVenue"]["poolAddress"]),
        # Absent when the public pair is not live; renders as an empty value.
        ("PUBLIC_PAIR_DEVIATION_BPS", decision.get("publicDeviationBps", "")),
        ("DECISION_SEVERITY", decision["severity"]),
        ("DECISION_ACTION", decision["action"]),
        ("FLASH_QUOTE_AMOUNT_RAW", str(decision["flashQuoteAmountRaw"])),
        ("GAS_HOLDBACK_TARGET_RAW", str(decision["gasHoldbackTargetRaw"])),
        ("REASONS_JSON", json.dumps(decision["reasons"])),
    )
    return "\n".join(f"{name}={shlex.quote(text)}" for name, text in pairs)
def main() -> int:
    """Assemble the full health report and print it (JSON or shell form).

    Reads the canonical policy and deployment-status files, resolves the RPC
    endpoint from merged .env files, probes both venues on-chain, and derives
    the support decision. Always returns 0; probe errors propagate as
    exceptions (except DODO failures, which are captured in its health dict).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--shell", action="store_true", help="Emit shell-friendly KEY=VALUE lines.")
    args = parser.parse_args()
    policy = load_json(POLICY_PATH)
    deployment_status = load_json(DEPLOYMENT_STATUS)
    env_values = merged_env_values()
    rpc_url = resolve_rpc_url(policy, env_values)
    chain = deployment_status["chains"][str(policy["network"]["chainId"])]
    public_pair = load_public_pair_from_policy(policy, deployment_status)
    # Token addresses come from deployment-status, keyed by policy symbols.
    public_pair["baseAddress"] = chain["cwTokens"][public_pair["base"]]
    public_pair["quoteAddress"] = chain["anchorAddresses"][public_pair["quote"]]
    defended_venue = dict(policy["defendedVenue"])
    public_health = query_uniswap_pair_health(rpc_url, public_pair)
    defended_health = query_dodo_health(rpc_url, defended_venue)
    decision = build_decision(policy, public_health, defended_health)
    # Surface the measured deviation on the decision for the shell renderer.
    if public_health.get("live"):
        decision["publicDeviationBps"] = public_health["deviationBps"]
    result = {
        "policy": {
            "path": str(POLICY_PATH),
            "thresholds": policy["thresholds"],
        },
        "publicPair": public_pair,
        "publicPairHealth": public_health,
        "defendedVenue": defended_venue,
        "defendedVenueHealth": defended_health,
        "decision": decision,
    }
    if args.shell:
        print(render_shell(result))
    else:
        print(json.dumps(result, indent=2))
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Execute the Phase 2 wave-2 live subset; project env is sourced first.
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
# shellcheck disable=SC1091
source smom-dbis-138/scripts/load-env.sh >/dev/null
exec python3 scripts/lib/execute_promod_uniswap_v2_phase2_wave2_live_subset.py

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Import off-chain sink rows from the environment; flags pass through untouched.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/source_to_cex_offchain_sink_tool.py" import-env "$@"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Validate config/pmm-soak-wallet-grid.json (6534 wallets, labels, linearIndex).
set -euo pipefail
REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
exec python3 "$REPO_ROOT/scripts/lib/validate_elemental_imperium_wallet_grid.py"

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Validate the configured source→CEX off-chain sink rows.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
python3 "$REPO_ROOT/scripts/lib/source_to_cex_offchain_sink_tool.py" validate