- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138 - Atomic swap + cross-chain-pmm-lops-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices - Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts Made-with: Cursor
230 lines · 7.9 KiB · Python
#!/usr/bin/env python3
|
|
"""Build a strict completion report for non-live liquidity venue inventory."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import csv
|
|
import json
|
|
from collections import Counter, defaultdict
|
|
from datetime import datetime, timezone
|
|
from pathlib import Path
|
|
|
|
|
|
# Repository root: two directory levels above this script's location.
ROOT = Path(__file__).resolve().parents[2]

# Input: latest master map of liquidity pools per chain (produced elsewhere).
MASTER_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"

# Outputs: JSON report, Markdown summary, and CSV of remaining non-live pools.
OUT_JSON = ROOT / "reports/status/liquidity-pools-completion-latest.json"

OUT_MD = ROOT / "reports/status/liquidity-pools-completion-latest.md"

OUT_CSV = ROOT / "reports/status/liquidity-pools-completion-remaining-latest.csv"

# Column order for the remaining-pools CSV; must match the keys of the
# dicts placed in the report's "remainingCsvRows" list.
CSV_FIELDS = [
    "chainId",
    "network",
    "venue",
    "pair",
    "poolAddress",
    "baseTokenAddress",
    "quoteTokenAddress",
    "status",
    "completionClass",
]
|
|
|
|
|
|
def md_table(headers: list[str], rows: list[list[str]]) -> str:
    """Render *headers* and *rows* as a GitHub-flavoured Markdown table.

    Cells are joined as-is; callers are responsible for any escaping.
    """
    header_line = "| " + " | ".join(headers) + " |"
    divider_line = "| " + " | ".join("---" for _ in headers) + " |"
    body_lines = ["| " + " | ".join(cells) + " |" for cells in rows]
    return "\n".join([header_line, divider_line, *body_lines])
|
|
|
|
|
|
def completion_class(status: str) -> str:
    """Map a venue *status* string onto a coarse completion bucket.

    Any status beginning with ``live`` counts as complete; statuses with
    no known bucket are returned unchanged.
    """
    if status.startswith("live"):
        return "complete"
    # These three statuses all mean "deployed config exists, live balance unproven".
    needs_live_balance = (
        "configured_no_live_balance",
        "configured_no_code",
        "configured_code_present_no_balance_read",
    )
    if status in needs_live_balance:
        return "deploy_or_verify_live_balance"
    # One-to-one renames; fall back to the raw status for anything unknown.
    exact_buckets = {
        "planned_gas_placeholder": "planned_gas_surface",
        "planned_reference_placeholder": "planned_reference_surface",
        "supported_not_live": "supported_but_not_live",
    }
    return exact_buckets.get(status, status)
|
|
|
|
|
|
def _pool_row(pool: dict) -> dict:
    """Flatten one master-map pool record into the report/CSV field shape."""
    return {
        "venue": pool["venue"],
        "pair": f"{pool['baseSymbol']}/{pool['quoteSymbol']}",
        "poolAddress": pool["poolAddress"],
        "baseTokenAddress": pool["baseAddress"],
        "quoteTokenAddress": pool["quoteAddress"],
        "status": pool["status"],
        "completionClass": completion_class(pool["status"]),
    }


def _reference_row(ref: dict) -> dict:
    """Flatten one reference-venue record into the report field shape."""
    return {
        "protocol": ref["protocol"],
        "pair": f"{ref['baseSymbol']}/{ref['quoteSymbol']}",
        "venueAddress": ref["venueAddress"],
        "status": ref["status"],
        "completionClass": completion_class(ref["status"]),
    }


def build() -> dict:
    """Build the strict completion report from ``MASTER_JSON``.

    Returns a dict with:
      - ``generatedAt``: UTC ISO timestamp (second precision),
      - ``source``: input path relative to the repo root,
      - ``summary``: totals across all chains,
      - ``chains``: one entry per chain with remaining (non-live) pools and
        reference venues,
      - ``remainingCsvRows``: flat rows for the remaining-pools CSV.

    A pool/reference row counts as "remaining" unless its status starts
    with ``live``.
    """
    data = json.loads(MASTER_JSON.read_text())
    generated_at = datetime.now(timezone.utc).replace(microsecond=0).isoformat()

    chain_rows = []
    remaining_csv_rows = []
    totals = Counter()  # pool status counts aggregated across all chains

    for chain in data["chains"]:
        remaining_pools = [p for p in chain["pools"] if not str(p["status"]).startswith("live")]
        remaining_refs = [r for r in chain.get("referenceVenues", []) if not str(r["status"]).startswith("live")]

        pool_status_counts = Counter(p["status"] for p in remaining_pools)
        totals.update(pool_status_counts)

        chain_rows.append(
            {
                "chainId": chain["chainId"],
                "network": chain["network"],
                "remainingPoolCount": len(remaining_pools),
                "remainingReferenceVenueCount": len(remaining_refs),
                "poolStatusCounts": dict(pool_status_counts),
                "referenceStatusCounts": dict(Counter(r["status"] for r in remaining_refs)),
                "remainingVenueCounts": dict(Counter(p["venue"] for p in remaining_pools)),
                "remainingPools": [_pool_row(p) for p in remaining_pools],
                "remainingReferenceVenues": [_reference_row(r) for r in remaining_refs],
            }
        )

        # CSV rows are the same per-pool fields prefixed with chain identity.
        remaining_csv_rows.extend(
            {"chainId": chain["chainId"], "network": chain["network"], **_pool_row(p)}
            for p in remaining_pools
        )

    return {
        "generatedAt": generated_at,
        "source": str(MASTER_JSON.relative_to(ROOT)),
        "summary": {
            "remainingPools": sum(r["remainingPoolCount"] for r in chain_rows),
            "remainingReferenceVenues": sum(r["remainingReferenceVenueCount"] for r in chain_rows),
            "poolStatusCounts": dict(totals),
        },
        "chains": chain_rows,
        "remainingCsvRows": remaining_csv_rows,
    }
|
|
|
|
|
|
def write_csv(rows: list[dict]) -> None:
    """Write the remaining-pool rows to ``OUT_CSV`` with a ``CSV_FIELDS`` header."""
    with OUT_CSV.open("w", newline="") as handle:
        writer = csv.DictWriter(handle, fieldnames=CSV_FIELDS)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)
|
|
|
|
|
|
def _chain_summary_table(chains: list[dict]) -> str:
    """Render the "By Chain" overview table for *chains*."""
    rows = [
        [
            str(chain["chainId"]),
            chain["network"],
            str(chain["remainingPoolCount"]),
            str(chain["remainingReferenceVenueCount"]),
            json.dumps(chain["poolStatusCounts"], sort_keys=True),
        ]
        for chain in chains
    ]
    return md_table(
        ["ChainID", "Network", "Remaining Pools", "Remaining Reference Venues", "Pool Status Counts"],
        rows,
    )


def _chain_detail_lines(chain: dict) -> list[str]:
    """Render one chain's detail section; empty list when nothing remains."""
    if chain["remainingPoolCount"] == 0 and chain["remainingReferenceVenueCount"] == 0:
        return []
    lines = [f"## {chain['network']} ({chain['chainId']})", ""]

    if chain["remainingPoolCount"]:
        pool_rows = [
            [
                p["venue"],
                p["pair"],
                f"`{p['poolAddress']}`",
                p["status"],
                p["completionClass"],
            ]
            for p in chain["remainingPools"]
        ]
        lines += [
            "### Remaining Pools",
            "",
            md_table(["Venue", "Pair", "Pool", "Status", "Completion Class"], pool_rows),
            "",
        ]

    if chain["remainingReferenceVenueCount"]:
        ref_rows = [
            [
                r["protocol"],
                r["pair"],
                # Em-dash placeholder when the venue has no on-chain address.
                f"`{r['venueAddress']}`" if r["venueAddress"] else "—",
                r["status"],
                r["completionClass"],
            ]
            for r in chain["remainingReferenceVenues"]
        ]
        lines += [
            "### Remaining Reference Venues",
            "",
            md_table(["Protocol", "Pair", "Venue Address", "Status", "Completion Class"], ref_rows),
            "",
        ]
    return lines


def write_markdown(report: dict) -> None:
    """Write the human-readable Markdown completion report to ``OUT_MD``.

    *report* is the dict produced by :func:`build`.
    """
    lines = [
        "# Liquidity Pools Completion Report",
        "",
        f"- Generated: `{report['generatedAt']}`",
        f"- Source: `{report['source']}`",
        "",
        "## Summary",
        "",
        f"- Remaining non-live pool rows: `{report['summary']['remainingPools']}`",
        f"- Remaining non-live reference venue rows: `{report['summary']['remainingReferenceVenues']}`",
        f"- Pool status counts: `{json.dumps(report['summary']['poolStatusCounts'], sort_keys=True)}`",
        "",
        "## By Chain",
        "",
        _chain_summary_table(report["chains"]),
        "",
    ]

    for chain in report["chains"]:
        lines += _chain_detail_lines(chain)

    OUT_MD.write_text("\n".join(lines) + "\n")
|
|
|
|
|
|
def main() -> int:
    """Build the report and write the JSON, Markdown, and CSV artifacts.

    Returns 0 on success (used as the process exit code).
    """
    report = build()

    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    write_markdown(report)
    write_csv(report["remainingCsvRows"])

    for written in (OUT_JSON, OUT_MD, OUT_CSV):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
|
|
|
|
|
|
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == "__main__":
    raise SystemExit(main())