Decomposes backend/api/rest/ai.go (which the review flagged at 1180 lines
and which was the largest file in the repo by a wide margin) into six
purpose-built files inside the same package, so no import paths change
for any caller and *Server receivers keep working:
ai.go 198 handlers + feature flags + exported AI* DTOs
ai_context.go 381 buildAIContext + indexed-DB queries
(stats / tx / address / block) + regex patterns +
extractBlockReference
ai_routes.go 139 queryAIRoutes + filterAIRouteMatches +
routeMatchesQuery + normalizeHexString
ai_docs.go 136 loadAIDocSnippets + findAIWorkspaceRoot +
scanDocForTerms + buildDocSearchTerms
ai_xai.go 267 xAI / OpenAI request/response types +
normalizeAIMessages + latestUserMessage +
callXAIChatCompletions + parseXAIError +
extractOutputText
ai_helpers.go 112 pure-function utilities (firstRegexMatch,
compactStringMap, compactAnyMap, stringValue,
stringSliceValue, uniqueStrings, clipString,
fileExists)
ai_runtime.go (rate limiter + metrics + audit log) is unchanged.
This is a pure move: no logic changes, no new public API, no changes to
HTTP routes. Each file carries only the imports it actually uses so
goimports is clean on every file individually. Every exported symbol
retained its original spelling so callers (routes.go, server.go, and
the AI e2e tests) keep compiling without edits.
Verification:
go build ./... clean
go vet ./... clean
go test ./api/rest/... PASS
staticcheck ./... clean on the SA* correctness family
Advances completion criterion 6 (backend maintainability): 'no single
Go file exceeds a few hundred lines; AI/LLM plumbing is separated from
HTTP handlers; context-building is separated from upstream calls.'
140 lines
4.0 KiB
Go
140 lines
4.0 KiB
Go
package rest
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"net/http"
|
|
"strings"
|
|
"time"
|
|
)
|
|
|
|
func (s *Server) queryAIRoutes(ctx context.Context, query string) ([]map[string]any, string) {
|
|
baseURL := strings.TrimSpace(firstNonEmptyEnv(
|
|
"TOKEN_AGGREGATION_API_BASE",
|
|
"TOKEN_AGGREGATION_URL",
|
|
"TOKEN_AGGREGATION_BASE_URL",
|
|
))
|
|
if baseURL == "" {
|
|
return nil, "token aggregation api base url is not configured for ai route retrieval"
|
|
}
|
|
|
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, strings.TrimRight(baseURL, "/")+"/api/v1/routes/ingestion?fromChainId=138", nil)
|
|
if err != nil {
|
|
return nil, "unable to build token aggregation ai request"
|
|
}
|
|
|
|
client := &http.Client{Timeout: 6 * time.Second}
|
|
resp, err := client.Do(req)
|
|
if err != nil {
|
|
return nil, "token aggregation live routes unavailable: " + err.Error()
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
if resp.StatusCode >= 400 {
|
|
return nil, fmt.Sprintf("token aggregation live routes returned %d", resp.StatusCode)
|
|
}
|
|
|
|
var payload struct {
|
|
Routes []map[string]any `json:"routes"`
|
|
}
|
|
if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
|
|
return nil, "unable to decode token aggregation live routes"
|
|
}
|
|
if len(payload.Routes) == 0 {
|
|
return nil, "token aggregation returned no live routes"
|
|
}
|
|
|
|
matches := filterAIRouteMatches(payload.Routes, query)
|
|
return matches, ""
|
|
}
|
|
|
|
func filterAIRouteMatches(routes []map[string]any, query string) []map[string]any {
|
|
query = strings.ToLower(strings.TrimSpace(query))
|
|
matches := make([]map[string]any, 0, 6)
|
|
for _, route := range routes {
|
|
if query != "" && !routeMatchesQuery(route, query) {
|
|
continue
|
|
}
|
|
trimmed := map[string]any{
|
|
"routeId": route["routeId"],
|
|
"status": route["status"],
|
|
"routeType": route["routeType"],
|
|
"fromChainId": route["fromChainId"],
|
|
"toChainId": route["toChainId"],
|
|
"tokenInSymbol": route["tokenInSymbol"],
|
|
"tokenOutSymbol": route["tokenOutSymbol"],
|
|
"assetSymbol": route["assetSymbol"],
|
|
"label": route["label"],
|
|
"aggregatorFamilies": route["aggregatorFamilies"],
|
|
"hopCount": route["hopCount"],
|
|
"bridgeType": route["bridgeType"],
|
|
"tags": route["tags"],
|
|
}
|
|
matches = append(matches, compactAnyMap(trimmed))
|
|
if len(matches) >= 6 {
|
|
break
|
|
}
|
|
}
|
|
if len(matches) == 0 {
|
|
for _, route := range routes {
|
|
trimmed := map[string]any{
|
|
"routeId": route["routeId"],
|
|
"status": route["status"],
|
|
"routeType": route["routeType"],
|
|
"fromChainId": route["fromChainId"],
|
|
"toChainId": route["toChainId"],
|
|
"tokenInSymbol": route["tokenInSymbol"],
|
|
"tokenOutSymbol": route["tokenOutSymbol"],
|
|
"assetSymbol": route["assetSymbol"],
|
|
"label": route["label"],
|
|
"aggregatorFamilies": route["aggregatorFamilies"],
|
|
}
|
|
matches = append(matches, compactAnyMap(trimmed))
|
|
if len(matches) >= 4 {
|
|
break
|
|
}
|
|
}
|
|
}
|
|
return matches
|
|
}
|
|
|
|
func normalizeHexString(value string) string {
|
|
trimmed := strings.TrimSpace(strings.ToLower(value))
|
|
return strings.TrimPrefix(trimmed, "0x")
|
|
}
|
|
|
|
func routeMatchesQuery(route map[string]any, query string) bool {
|
|
fields := []string{
|
|
stringValue(route["routeId"]),
|
|
stringValue(route["routeType"]),
|
|
stringValue(route["tokenInSymbol"]),
|
|
stringValue(route["tokenOutSymbol"]),
|
|
stringValue(route["assetSymbol"]),
|
|
stringValue(route["label"]),
|
|
}
|
|
for _, field := range fields {
|
|
if strings.Contains(strings.ToLower(field), query) {
|
|
return true
|
|
}
|
|
}
|
|
for _, value := range stringSliceValue(route["aggregatorFamilies"]) {
|
|
if strings.Contains(strings.ToLower(value), query) {
|
|
return true
|
|
}
|
|
}
|
|
for _, value := range stringSliceValue(route["tags"]) {
|
|
if strings.Contains(strings.ToLower(value), query) {
|
|
return true
|
|
}
|
|
}
|
|
for _, symbol := range []string{"cusdt", "cusdc", "cxauc", "ceurt", "usdt", "usdc", "weth"} {
|
|
if strings.Contains(query, symbol) {
|
|
if strings.Contains(strings.ToLower(strings.Join(fields, " ")), symbol) {
|
|
return true
|
|
}
|
|
}
|
|
}
|
|
return false
|
|
}
|