chore: sync state and push to Gitea
Made-with: Cursor
This commit is contained in:
@@ -169,23 +169,27 @@ class SlackAlertChannel implements AlertChannel {
|
||||
[AlertSeverity.CRITICAL]: '#ff0000',
|
||||
}[alert.severity];
|
||||
|
||||
const fields: Array<{ title: string; value: string; short: boolean }> = [
|
||||
...(alert.dealId ? [{ title: 'Deal ID', value: alert.dealId, short: true }] : []),
|
||||
...(alert.violationType ? [{ title: 'Violation Type', value: alert.violationType, short: true }] : []),
|
||||
{ title: 'Timestamp', value: alert.timestamp.toISOString(), short: true },
|
||||
...(alert.metadata ? Object.entries(alert.metadata).map(([k, v]) => ({ title: k, value: String(v), short: true })) : []),
|
||||
];
|
||||
|
||||
const payload = {
|
||||
attachments: [{
|
||||
color,
|
||||
title: `Arbitrage Alert: ${alert.severity.toUpperCase()}`,
|
||||
text: alert.message,
|
||||
fields: [
|
||||
...(alert.dealId ? [{ title: 'Deal ID', value: alert.dealId, short: true }] : []),
|
||||
...(alert.violationType ? [{ title: 'Violation Type', value: alert.violationType, short: true }] : []),
|
||||
{ title: 'Timestamp', value: alert.timestamp.toISOString(), short: true },
|
||||
],
|
||||
...(alert.metadata ? { fields: [...(payload.attachments[0].fields || []), ...Object.entries(alert.metadata).map(([k, v]) => ({ title: k, value: String(v), short: true }))] } : {}),
|
||||
}],
|
||||
attachments: [{ color, title: `Arbitrage Alert: ${alert.severity.toUpperCase()}`, text: alert.message, fields }],
|
||||
};
|
||||
|
||||
// TODO: Implement actual Slack webhook call
|
||||
// await fetch(this.webhookUrl, { method: 'POST', body: JSON.stringify(payload) });
|
||||
logger.info('Slack alert (not implemented)', { payload });
|
||||
try {
|
||||
const res = await fetch(this.webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
if (!res.ok) throw new Error(`Slack webhook failed: ${res.status}`);
|
||||
} catch (err) {
|
||||
logger.error('Slack alert delivery failed', { error: err instanceof Error ? err.message : err });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,9 +219,21 @@ class PagerDutyAlertChannel implements AlertChannel {
|
||||
},
|
||||
};
|
||||
|
||||
// TODO: Implement actual PagerDuty API call
|
||||
// await fetch('https://events.pagerduty.com/v2/enqueue', { method: 'POST', body: JSON.stringify(payload) });
|
||||
logger.info('PagerDuty alert (not implemented)', { payload });
|
||||
try {
|
||||
const res = await fetch('https://events.pagerduty.com/v2/enqueue', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(`PagerDuty API ${res.status}: ${text}`);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('PagerDuty alert delivery failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -225,16 +241,31 @@ class EmailAlertChannel implements AlertChannel {
|
||||
// Parameter property: stores the list of destination addresses used by send().
constructor(private recipients: string[]) {}
|
||||
|
||||
/**
 * Delivers an alert by email through an HTTP email API (e.g. a SendGrid
 * proxy) configured via the EMAIL_ALERT_API_URL environment variable.
 *
 * Only CRITICAL and HIGH severity alerts are emailed; lower severities
 * return immediately. When no API URL is configured, the alert is skipped
 * with a warning. Delivery failures are logged — never thrown — so a
 * broken email channel cannot take down the alerting pipeline.
 *
 * @param alert - The alert to deliver.
 */
async send(alert: Alert): Promise<void> {
  // Only send critical/high alerts via email.
  if (alert.severity !== AlertSeverity.CRITICAL && alert.severity !== AlertSeverity.HIGH) {
    return;
  }

  const emailApiUrl = process.env.EMAIL_ALERT_API_URL;
  if (!emailApiUrl) {
    logger.warn('Email alert skipped: Set EMAIL_ALERT_API_URL (e.g. SendGrid) to enable');
    return;
  }

  try {
    const res = await fetch(emailApiUrl, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        to: this.recipients,
        subject: `[${alert.severity.toUpperCase()}] Arbitrage Alert`,
        text: alert.message,
        // NOTE(review): alert.message is interpolated into HTML unescaped —
        // if messages can ever carry user-controlled input, HTML-escape here.
        html: `<p>${alert.message}</p><p>Deal ID: ${alert.dealId || 'N/A'}</p>`,
      }),
    });
    if (!res.ok) throw new Error(`Email API failed: ${res.status}`);
  } catch (err) {
    // String(err) matches the other alert channels and avoids logging a raw
    // non-Error value (the previous code passed `err` through unconverted).
    logger.error('Email alert delivery failed', { error: err instanceof Error ? err.message : String(err) });
  }
}
|
||||
}
|
||||
|
||||
|
||||
33
services/monitoring/metrics.service.ts
Normal file
33
services/monitoring/metrics.service.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
// Metrics Service - Stub for deal orchestrator
|
||||
// When monitoring stack is deployed: add Prometheus push (PROMETHEUS_PUSH_GATEWAY) or expose scrape endpoint here.
|
||||
|
||||
import { logger } from '@/infrastructure/monitoring/logger';
|
||||
|
||||
class MetricsService {
|
||||
updateActiveDeals(_status: string, _count: number): void {
|
||||
// Stub: record in real metrics when monitoring available
|
||||
}
|
||||
|
||||
recordStepExecution(_step: string, _durationSeconds: number): void {
|
||||
// Stub
|
||||
}
|
||||
|
||||
recordError(_errorName: string, _step?: string): void {
|
||||
logger.debug('Metrics: recordError', { errorName: _errorName, step: _step });
|
||||
}
|
||||
|
||||
recordDealExecution(
|
||||
_status: string,
|
||||
_participantBankId: string,
|
||||
_moduleId: string,
|
||||
_durationSeconds: number
|
||||
): void {
|
||||
// Stub
|
||||
}
|
||||
|
||||
recordRiskViolation(_violationType: string, _severity: string): void {
|
||||
logger.warn('Metrics: risk violation', { violationType: _violationType, severity: _severity });
|
||||
}
|
||||
}
|
||||
|
||||
// Shared singleton; import this rather than constructing MetricsService directly.
export const metricsService = new MetricsService();
|
||||
Reference in New Issue
Block a user