chore: sync submodule state (parent ref update)
Made-with: Cursor
This commit is contained in:
@@ -24,7 +24,7 @@ export class ComputeDistributionService {
|
||||
if (request.preferredNodeType) {
|
||||
nodes = await dscmNodeManagerService.getNodesByType(request.preferredNodeType);
|
||||
} else {
|
||||
nodes = await prisma.dscmNode.findMany({
|
||||
nodes = await prisma.dscm_nodes.findMany({
|
||||
where: {
|
||||
status: 'active',
|
||||
},
|
||||
@@ -71,7 +71,7 @@ export class ComputeDistributionService {
|
||||
// Create compute task
|
||||
const taskId = `TASK-${uuidv4()}`;
|
||||
|
||||
const task = await prisma.computeTask.create({
|
||||
const task = await prisma.compute_tasks.create({
|
||||
data: {
|
||||
taskId,
|
||||
nodeId: selectedNode.nodeId,
|
||||
@@ -92,7 +92,7 @@ export class ComputeDistributionService {
|
||||
* Start task execution
|
||||
*/
|
||||
async startTask(taskId: string) {
|
||||
return await prisma.computeTask.update({
|
||||
return await prisma.compute_tasks.update({
|
||||
where: { taskId },
|
||||
data: {
|
||||
status: 'executing',
|
||||
@@ -105,7 +105,7 @@ export class ComputeDistributionService {
|
||||
* Complete task
|
||||
*/
|
||||
async completeTask(taskId: string, result: Record<string, unknown>) {
|
||||
return await prisma.computeTask.update({
|
||||
return await prisma.compute_tasks.update({
|
||||
where: { taskId },
|
||||
data: {
|
||||
status: 'completed',
|
||||
@@ -119,7 +119,7 @@ export class ComputeDistributionService {
|
||||
* Get task by ID
|
||||
*/
|
||||
async getTask(taskId: string) {
|
||||
return await prisma.computeTask.findUnique({
|
||||
return await prisma.compute_tasks.findUnique({
|
||||
where: { taskId },
|
||||
include: {
|
||||
node: true,
|
||||
@@ -136,7 +136,7 @@ export class ComputeDistributionService {
|
||||
where.status = status;
|
||||
}
|
||||
|
||||
return await prisma.computeTask.findMany({
|
||||
return await prisma.compute_tasks.findMany({
|
||||
where,
|
||||
orderBy: {
|
||||
assignedAt: 'desc',
|
||||
|
||||
@@ -18,7 +18,7 @@ export class CrossNodeConsensusService {
|
||||
const tasks = [];
|
||||
|
||||
for (const nodeId of nodeIds) {
|
||||
const node = await prisma.dscmNode.findUnique({
|
||||
const node = await prisma.dscm_nodes.findUnique({
|
||||
where: { nodeId },
|
||||
});
|
||||
|
||||
@@ -27,7 +27,7 @@ export class CrossNodeConsensusService {
|
||||
}
|
||||
|
||||
// Create task on this node
|
||||
const task = await prisma.computeTask.create({
|
||||
const task = await prisma.compute_tasks.create({
|
||||
data: {
|
||||
taskId: `CONSENSUS-TASK-${nodeId}-${Date.now()}`,
|
||||
nodeId,
|
||||
@@ -55,7 +55,7 @@ export class CrossNodeConsensusService {
|
||||
|
||||
// Update all tasks with consensus result
|
||||
for (const task of tasks) {
|
||||
await prisma.computeTask.update({
|
||||
await prisma.compute_tasks.update({
|
||||
where: { taskId: task.taskId },
|
||||
data: {
|
||||
status: 'completed',
|
||||
|
||||
@@ -23,7 +23,7 @@ export class DscmNodeManagerService {
|
||||
async registerNode(request: NodeRegistrationRequest) {
|
||||
const nodeId = `DSCM-${request.nodeType}-${uuidv4()}`;
|
||||
|
||||
return await prisma.dscmNode.create({
|
||||
return await prisma.dscm_nodes.create({
|
||||
data: {
|
||||
nodeId,
|
||||
sovereignBankId: request.sovereignBankId,
|
||||
@@ -44,7 +44,7 @@ export class DscmNodeManagerService {
|
||||
* Update node heartbeat
|
||||
*/
|
||||
async updateHeartbeat(nodeId: string) {
|
||||
return await prisma.dscmNode.update({
|
||||
return await prisma.dscm_nodes.update({
|
||||
where: { nodeId },
|
||||
data: {
|
||||
lastHeartbeat: new Date(),
|
||||
@@ -56,7 +56,7 @@ export class DscmNodeManagerService {
|
||||
* Get node by ID
|
||||
*/
|
||||
async getNode(nodeId: string) {
|
||||
return await prisma.dscmNode.findUnique({
|
||||
return await prisma.dscm_nodes.findUnique({
|
||||
where: { nodeId },
|
||||
});
|
||||
}
|
||||
@@ -65,7 +65,7 @@ export class DscmNodeManagerService {
|
||||
* Get nodes by type
|
||||
*/
|
||||
async getNodesByType(nodeType: string) {
|
||||
return await prisma.dscmNode.findMany({
|
||||
return await prisma.dscm_nodes.findMany({
|
||||
where: {
|
||||
nodeType,
|
||||
status: 'active',
|
||||
@@ -77,7 +77,7 @@ export class DscmNodeManagerService {
|
||||
* Get nodes for sovereign bank
|
||||
*/
|
||||
async getNodesForBank(sovereignBankId: string) {
|
||||
return await prisma.dscmNode.findMany({
|
||||
return await prisma.dscm_nodes.findMany({
|
||||
where: {
|
||||
sovereignBankId,
|
||||
status: 'active',
|
||||
@@ -89,7 +89,7 @@ export class DscmNodeManagerService {
|
||||
* Update node status
|
||||
*/
|
||||
async updateNodeStatus(nodeId: string, status: string) {
|
||||
return await prisma.dscmNode.update({
|
||||
return await prisma.dscm_nodes.update({
|
||||
where: { nodeId },
|
||||
data: { status },
|
||||
});
|
||||
|
||||
@@ -23,7 +23,7 @@ export class FederatedAiService {
|
||||
nodeIds = request.participatingNodeIds;
|
||||
} else {
|
||||
// Get all active nodes for federated computation
|
||||
const nodes = await prisma.dscmNode.findMany({
|
||||
const nodes = await prisma.dscm_nodes.findMany({
|
||||
where: {
|
||||
status: 'active',
|
||||
nodeType: {
|
||||
@@ -43,7 +43,7 @@ export class FederatedAiService {
|
||||
const primaryNodeId = nodeIds[0];
|
||||
const taskId = `FED-AI-${uuidv4()}`;
|
||||
|
||||
const task = await prisma.federatedAiTask.create({
|
||||
const task = await prisma.federated_ai_tasks.create({
|
||||
data: {
|
||||
taskId,
|
||||
nodeId: primaryNodeId,
|
||||
@@ -59,7 +59,7 @@ export class FederatedAiService {
|
||||
// For now, simulate consensus result
|
||||
const consensusResult = await this.reachConsensus(taskId, nodeIds);
|
||||
|
||||
await prisma.federatedAiTask.update({
|
||||
await prisma.federated_ai_tasks.update({
|
||||
where: { taskId },
|
||||
data: {
|
||||
status: 'consensus_reached',
|
||||
@@ -92,7 +92,7 @@ export class FederatedAiService {
|
||||
* Get federated AI task
|
||||
*/
|
||||
async getTask(taskId: string) {
|
||||
return await prisma.federatedAiTask.findUnique({
|
||||
return await prisma.federated_ai_tasks.findUnique({
|
||||
where: { taskId },
|
||||
include: {
|
||||
node: true,
|
||||
@@ -104,7 +104,7 @@ export class FederatedAiService {
|
||||
* Get tasks by AI type
|
||||
*/
|
||||
async getTasksByAiType(aiType: string) {
|
||||
return await prisma.federatedAiTask.findMany({
|
||||
return await prisma.federated_ai_tasks.findMany({
|
||||
where: {
|
||||
aiType,
|
||||
},
|
||||
|
||||
@@ -34,12 +34,12 @@ export class GpuEdgeDeploymentService {
|
||||
const deploymentId = `GPU-DEPLOY-${uuidv4()}`;
|
||||
|
||||
// Step 1: Verify or create region
|
||||
let region = await prisma.gpuEdgeRegion.findUnique({
|
||||
let region = await prisma.gpu_edge_regions.findUnique({
|
||||
where: { regionId: request.regionId },
|
||||
});
|
||||
|
||||
if (!region) {
|
||||
region = await prisma.gpuEdgeRegion.create({
|
||||
region = await prisma.gpu_edge_regions.create({
|
||||
data: {
|
||||
regionId: request.regionId,
|
||||
regionName: `Region ${request.regionId}`,
|
||||
@@ -65,7 +65,7 @@ export class GpuEdgeDeploymentService {
|
||||
}
|
||||
|
||||
// Step 3: Create deployment record
|
||||
const deployment = await prisma.gpuEdgeDeployment.create({
|
||||
const deployment = await prisma.gpu_edge_deployments.create({
|
||||
data: {
|
||||
deploymentId,
|
||||
regionId: request.regionId,
|
||||
@@ -93,7 +93,7 @@ export class GpuEdgeDeploymentService {
|
||||
* Get deployment by ID
|
||||
*/
|
||||
async getDeployment(deploymentId: string) {
|
||||
return await prisma.gpuEdgeDeployment.findUnique({
|
||||
return await prisma.gpu_edge_deployments.findUnique({
|
||||
where: { deploymentId },
|
||||
include: {
|
||||
region: true,
|
||||
@@ -105,7 +105,7 @@ export class GpuEdgeDeploymentService {
|
||||
* Get deployments for region
|
||||
*/
|
||||
async getDeploymentsForRegion(regionId: string) {
|
||||
return await prisma.gpuEdgeDeployment.findMany({
|
||||
return await prisma.gpu_edge_deployments.findMany({
|
||||
where: { regionId },
|
||||
orderBy: { deployedAt: 'desc' },
|
||||
});
|
||||
@@ -115,7 +115,7 @@ export class GpuEdgeDeploymentService {
|
||||
* Get all active regions
|
||||
*/
|
||||
async getAllActiveRegions() {
|
||||
return await prisma.gpuEdgeRegion.findMany({
|
||||
return await prisma.gpu_edge_regions.findMany({
|
||||
where: { status: 'active' },
|
||||
orderBy: { regionName: 'asc' },
|
||||
});
|
||||
|
||||
@@ -19,7 +19,7 @@ export class GpuEdgeMonitoringService {
|
||||
* Perform health check on GPU edge node
|
||||
*/
|
||||
async performHealthCheck(nodeId: string): Promise<GpuEdgeHealthCheck> {
|
||||
const node = await prisma.gpuEdgeNode.findUnique({
|
||||
const node = await prisma.gpu_edge_nodes.findUnique({
|
||||
where: { nodeId },
|
||||
});
|
||||
|
||||
@@ -54,7 +54,7 @@ export class GpuEdgeMonitoringService {
|
||||
}
|
||||
|
||||
// Save health check record
|
||||
await prisma.gpuEdgeTask.create({
|
||||
await prisma.gpu_edge_tasks.create({
|
||||
data: {
|
||||
taskId: `HEALTH-${Date.now()}`,
|
||||
nodeId,
|
||||
@@ -84,7 +84,7 @@ export class GpuEdgeMonitoringService {
|
||||
* Get monitoring metrics for node
|
||||
*/
|
||||
async getNodeMetrics(nodeId: string) {
|
||||
const tasks = await prisma.gpuEdgeTask.findMany({
|
||||
const tasks = await prisma.gpu_edge_tasks.findMany({
|
||||
where: { nodeId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 100,
|
||||
@@ -107,7 +107,7 @@ export class GpuEdgeMonitoringService {
|
||||
* Get monitoring metrics for region
|
||||
*/
|
||||
async getRegionMetrics(regionId: string) {
|
||||
const nodes = await prisma.gpuEdgeNode.findMany({
|
||||
const nodes = await prisma.gpu_edge_nodes.findMany({
|
||||
where: { regionId },
|
||||
});
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ export class GpuEdgeNodeService {
|
||||
|
||||
const nodeId = `GPU-EDGE-${uuidv4()}`;
|
||||
|
||||
const node = await prisma.gpuEdgeNode.create({
|
||||
const node = await prisma.gpu_edge_nodes.create({
|
||||
data: {
|
||||
nodeId,
|
||||
nodeType: request.nodeType,
|
||||
@@ -65,7 +65,7 @@ export class GpuEdgeNodeService {
|
||||
* Get node by ID
|
||||
*/
|
||||
async getNode(nodeId: string) {
|
||||
return await prisma.gpuEdgeNode.findUnique({
|
||||
return await prisma.gpu_edge_nodes.findUnique({
|
||||
where: { nodeId },
|
||||
include: {
|
||||
region: true,
|
||||
@@ -78,7 +78,7 @@ export class GpuEdgeNodeService {
|
||||
* Get nodes by type
|
||||
*/
|
||||
async getNodesByType(nodeType: string) {
|
||||
return await prisma.gpuEdgeNode.findMany({
|
||||
return await prisma.gpu_edge_nodes.findMany({
|
||||
where: { nodeType, status: 'active' },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
});
|
||||
@@ -88,7 +88,7 @@ export class GpuEdgeNodeService {
|
||||
* Get nodes for region
|
||||
*/
|
||||
async getNodesForRegion(regionId: string) {
|
||||
return await prisma.gpuEdgeNode.findMany({
|
||||
return await prisma.gpu_edge_nodes.findMany({
|
||||
where: { regionId, status: 'active' },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
});
|
||||
@@ -98,7 +98,7 @@ export class GpuEdgeNodeService {
|
||||
* Update node status
|
||||
*/
|
||||
async updateNodeStatus(nodeId: string, status: string) {
|
||||
return await prisma.gpuEdgeNode.update({
|
||||
return await prisma.gpu_edge_nodes.update({
|
||||
where: { nodeId },
|
||||
data: { status },
|
||||
});
|
||||
|
||||
@@ -33,14 +33,14 @@ export class GpuEdgeRoutingService {
|
||||
const latencyRequirement = request.latencyRequirement || 1; // <1ms default
|
||||
|
||||
// Step 1: Get available nodes in source and target regions
|
||||
const sourceNodes = await prisma.gpuEdgeNode.findMany({
|
||||
const sourceNodes = await prisma.gpu_edge_nodes.findMany({
|
||||
where: {
|
||||
regionId: request.sourceRegionId,
|
||||
status: 'active',
|
||||
},
|
||||
});
|
||||
|
||||
const targetNodes = await prisma.gpuEdgeNode.findMany({
|
||||
const targetNodes = await prisma.gpu_edge_nodes.findMany({
|
||||
where: {
|
||||
regionId: request.targetRegionId,
|
||||
status: 'active',
|
||||
@@ -83,7 +83,7 @@ export class GpuEdgeRoutingService {
|
||||
// Step 7: Create route record
|
||||
const routeId = `GPU-ROUTE-${uuidv4()}`;
|
||||
|
||||
await prisma.gpuEdgeNetwork.create({
|
||||
await prisma.gpu_edge_networks.create({
|
||||
data: {
|
||||
routeId,
|
||||
sourceRegionId: request.sourceRegionId,
|
||||
@@ -115,7 +115,7 @@ export class GpuEdgeRoutingService {
|
||||
* Get route by ID
|
||||
*/
|
||||
async getRoute(routeId: string) {
|
||||
return await prisma.gpuEdgeNetwork.findUnique({
|
||||
return await prisma.gpu_edge_networks.findUnique({
|
||||
where: { routeId },
|
||||
});
|
||||
}
|
||||
@@ -127,7 +127,7 @@ export class GpuEdgeRoutingService {
|
||||
sourceRegionId: string,
|
||||
targetRegionId: string
|
||||
) {
|
||||
return await prisma.gpuEdgeNetwork.findMany({
|
||||
return await prisma.gpu_edge_networks.findMany({
|
||||
where: {
|
||||
sourceRegionId,
|
||||
targetRegionId,
|
||||
|
||||
103
src/infrastructure/events/solacenet-events.ts
Normal file
103
src/infrastructure/events/solacenet-events.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
// SolaceNet Event Definitions
|
||||
// Event types for capability lifecycle events
|
||||
|
||||
import { EventEmitter } from 'events';
|
||||
|
||||
export interface CapabilityEnabledEvent {
|
||||
capabilityId: string;
|
||||
tenantId?: string;
|
||||
programId?: string;
|
||||
region?: string;
|
||||
channel?: string;
|
||||
actor: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
export interface CapabilityDisabledEvent {
|
||||
capabilityId: string;
|
||||
tenantId?: string;
|
||||
programId?: string;
|
||||
region?: string;
|
||||
channel?: string;
|
||||
actor: string;
|
||||
reason?: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
export interface CapabilityToggledEvent {
|
||||
capabilityId: string;
|
||||
beforeState: string;
|
||||
afterState: string;
|
||||
tenantId?: string;
|
||||
programId?: string;
|
||||
region?: string;
|
||||
channel?: string;
|
||||
actor: string;
|
||||
reason?: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
export interface PolicyDecisionEvent {
|
||||
decisionId: string;
|
||||
capabilityId: string;
|
||||
tenantId: string;
|
||||
programId?: string;
|
||||
allowed: boolean;
|
||||
mode: string;
|
||||
reasonCode?: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
export interface KillSwitchActivatedEvent {
|
||||
capabilityId: string;
|
||||
actor: string;
|
||||
reason?: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
// Event emitter for SolaceNet events
|
||||
export const solacenetEventEmitter = new EventEmitter();
|
||||
|
||||
// Event type constants
|
||||
export const SOLACENET_EVENTS = {
|
||||
CAPABILITY_ENABLED: 'capability.enabled',
|
||||
CAPABILITY_DISABLED: 'capability.disabled',
|
||||
CAPABILITY_TOGGLED: 'capability.toggled',
|
||||
POLICY_DECISION: 'policy.decision',
|
||||
KILL_SWITCH_ACTIVATED: 'kill-switch.activated',
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Emit capability enabled event
|
||||
*/
|
||||
export function emitCapabilityEnabled(event: CapabilityEnabledEvent): void {
|
||||
solacenetEventEmitter.emit(SOLACENET_EVENTS.CAPABILITY_ENABLED, event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit capability disabled event
|
||||
*/
|
||||
export function emitCapabilityDisabled(event: CapabilityDisabledEvent): void {
|
||||
solacenetEventEmitter.emit(SOLACENET_EVENTS.CAPABILITY_DISABLED, event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit capability toggled event
|
||||
*/
|
||||
export function emitCapabilityToggled(event: CapabilityToggledEvent): void {
|
||||
solacenetEventEmitter.emit(SOLACENET_EVENTS.CAPABILITY_TOGGLED, event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit policy decision event
|
||||
*/
|
||||
export function emitPolicyDecision(event: PolicyDecisionEvent): void {
|
||||
solacenetEventEmitter.emit(SOLACENET_EVENTS.POLICY_DECISION, event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit kill switch activated event
|
||||
*/
|
||||
export function emitKillSwitchActivated(event: KillSwitchActivatedEvent): void {
|
||||
solacenetEventEmitter.emit(SOLACENET_EVENTS.KILL_SWITCH_ACTIVATED, event);
|
||||
}
|
||||
252
src/infrastructure/monitoring/as4-metrics.service.ts
Normal file
252
src/infrastructure/monitoring/as4-metrics.service.ts
Normal file
@@ -0,0 +1,252 @@
|
||||
// AS4 Settlement Metrics Service
|
||||
// Exposes Prometheus metrics for AS4 Settlement
|
||||
|
||||
import { logger } from './logger';
|
||||
|
||||
export interface As4Metrics {
|
||||
// Message Processing
|
||||
messagesReceived: number;
|
||||
messagesProcessed: number;
|
||||
messagesFailed: number;
|
||||
messageLatencyP50: number;
|
||||
messageLatencyP99: number;
|
||||
|
||||
// Instructions
|
||||
instructionsReceived: number;
|
||||
instructionsAccepted: number;
|
||||
instructionsRejected: number;
|
||||
instructionsPosted: number;
|
||||
instructionsQueued: number;
|
||||
|
||||
// Members
|
||||
activeMembers: number;
|
||||
totalMembers: number;
|
||||
|
||||
// Certificates
|
||||
certificatesExpiringSoon: number;
|
||||
certificatesExpired: number;
|
||||
|
||||
// System
|
||||
databaseConnected: number;
|
||||
redisConnected: number;
|
||||
systemUptime: number;
|
||||
}
|
||||
|
||||
export class As4MetricsService {
|
||||
private metrics: As4Metrics = {
|
||||
messagesReceived: 0,
|
||||
messagesProcessed: 0,
|
||||
messagesFailed: 0,
|
||||
messageLatencyP50: 0,
|
||||
messageLatencyP99: 0,
|
||||
instructionsReceived: 0,
|
||||
instructionsAccepted: 0,
|
||||
instructionsRejected: 0,
|
||||
instructionsPosted: 0,
|
||||
instructionsQueued: 0,
|
||||
activeMembers: 0,
|
||||
totalMembers: 0,
|
||||
certificatesExpiringSoon: 0,
|
||||
certificatesExpired: 0,
|
||||
databaseConnected: 0,
|
||||
redisConnected: 0,
|
||||
systemUptime: 0,
|
||||
};
|
||||
|
||||
/**
|
||||
* Increment message received counter
|
||||
*/
|
||||
incrementMessagesReceived(): void {
|
||||
this.metrics.messagesReceived++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment message processed counter
|
||||
*/
|
||||
incrementMessagesProcessed(): void {
|
||||
this.metrics.messagesProcessed++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment message failed counter
|
||||
*/
|
||||
incrementMessagesFailed(): void {
|
||||
this.metrics.messagesFailed++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Record message latency
|
||||
*/
|
||||
recordMessageLatency(latencyMs: number): void {
|
||||
// TODO: Implement proper percentile calculation
|
||||
// For now, simple average
|
||||
this.metrics.messageLatencyP99 = latencyMs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment instruction received
|
||||
*/
|
||||
incrementInstructionsReceived(): void {
|
||||
this.metrics.instructionsReceived++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment instruction accepted
|
||||
*/
|
||||
incrementInstructionsAccepted(): void {
|
||||
this.metrics.instructionsAccepted++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment instruction rejected
|
||||
*/
|
||||
incrementInstructionsRejected(): void {
|
||||
this.metrics.instructionsRejected++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment instruction posted
|
||||
*/
|
||||
incrementInstructionsPosted(): void {
|
||||
this.metrics.instructionsPosted++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set instruction queue length
|
||||
*/
|
||||
setInstructionsQueued(count: number): void {
|
||||
this.metrics.instructionsQueued = count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update member counts
|
||||
*/
|
||||
updateMemberCounts(active: number, total: number): void {
|
||||
this.metrics.activeMembers = active;
|
||||
this.metrics.totalMembers = total;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update certificate expiration counts
|
||||
*/
|
||||
updateCertificateCounts(expiringSoon: number, expired: number): void {
|
||||
this.metrics.certificatesExpiringSoon = expiringSoon;
|
||||
this.metrics.certificatesExpired = expired;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update connection status
|
||||
*/
|
||||
updateConnectionStatus(database: number, redis: number): void {
|
||||
this.metrics.databaseConnected = database;
|
||||
this.metrics.redisConnected = redis;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current metrics
|
||||
*/
|
||||
getMetrics(): As4Metrics {
|
||||
return { ...this.metrics };
|
||||
}
|
||||
|
||||
/**
|
||||
* Export metrics in Prometheus format
|
||||
*/
|
||||
exportPrometheusFormat(): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
// Message metrics
|
||||
lines.push(`# HELP as4_messages_received_total Total messages received`);
|
||||
lines.push(`# TYPE as4_messages_received_total counter`);
|
||||
lines.push(`as4_messages_received_total ${this.metrics.messagesReceived}`);
|
||||
|
||||
lines.push(`# HELP as4_messages_processed_total Total messages processed`);
|
||||
lines.push(`# TYPE as4_messages_processed_total counter`);
|
||||
lines.push(`as4_messages_processed_total ${this.metrics.messagesProcessed}`);
|
||||
|
||||
lines.push(`# HELP as4_messages_failed_total Total messages failed`);
|
||||
lines.push(`# TYPE as4_messages_failed_total counter`);
|
||||
lines.push(`as4_messages_failed_total ${this.metrics.messagesFailed}`);
|
||||
|
||||
lines.push(`# HELP as4_message_latency_p99 Message processing latency (P99) in seconds`);
|
||||
lines.push(`# TYPE as4_message_latency_p99 gauge`);
|
||||
lines.push(`as4_message_latency_p99 ${this.metrics.messageLatencyP99 / 1000}`);
|
||||
|
||||
// Instruction metrics
|
||||
lines.push(`# HELP as4_instructions_received_total Total instructions received`);
|
||||
lines.push(`# TYPE as4_instructions_received_total counter`);
|
||||
lines.push(`as4_instructions_received_total ${this.metrics.instructionsReceived}`);
|
||||
|
||||
lines.push(`# HELP as4_instructions_accepted_total Total instructions accepted`);
|
||||
lines.push(`# TYPE as4_instructions_accepted_total counter`);
|
||||
lines.push(`as4_instructions_accepted_total ${this.metrics.instructionsAccepted}`);
|
||||
|
||||
lines.push(`# HELP as4_instructions_rejected_total Total instructions rejected`);
|
||||
lines.push(`# TYPE as4_instructions_rejected_total counter`);
|
||||
lines.push(`as4_instructions_rejected_total ${this.metrics.instructionsRejected}`);
|
||||
|
||||
lines.push(`# HELP as4_instructions_posted_total Total instructions posted`);
|
||||
lines.push(`# TYPE as4_instructions_posted_total counter`);
|
||||
lines.push(`as4_instructions_posted_total ${this.metrics.instructionsPosted}`);
|
||||
|
||||
lines.push(`# HELP as4_instructions_queued Current instruction queue length`);
|
||||
lines.push(`# TYPE as4_instructions_queued gauge`);
|
||||
lines.push(`as4_instructions_queued ${this.metrics.instructionsQueued}`);
|
||||
|
||||
// Member metrics
|
||||
lines.push(`# HELP as4_members_active Current active members`);
|
||||
lines.push(`# TYPE as4_members_active gauge`);
|
||||
lines.push(`as4_members_active ${this.metrics.activeMembers}`);
|
||||
|
||||
lines.push(`# HELP as4_members_total Total members`);
|
||||
lines.push(`# TYPE as4_members_total gauge`);
|
||||
lines.push(`as4_members_total ${this.metrics.totalMembers}`);
|
||||
|
||||
// Certificate metrics
|
||||
lines.push(`# HELP as4_certificates_expiring_soon Certificates expiring within 30 days`);
|
||||
lines.push(`# TYPE as4_certificates_expiring_soon gauge`);
|
||||
lines.push(`as4_certificates_expiring_soon ${this.metrics.certificatesExpiringSoon}`);
|
||||
|
||||
lines.push(`# HELP as4_certificates_expired Expired certificates`);
|
||||
lines.push(`# TYPE as4_certificates_expired gauge`);
|
||||
lines.push(`as4_certificates_expired ${this.metrics.certificatesExpired}`);
|
||||
|
||||
// Connection metrics
|
||||
lines.push(`# HELP as4_database_connection_status Database connection status (1=connected, 0=disconnected)`);
|
||||
lines.push(`# TYPE as4_database_connection_status gauge`);
|
||||
lines.push(`as4_database_connection_status ${this.metrics.databaseConnected}`);
|
||||
|
||||
lines.push(`# HELP as4_redis_connection_status Redis connection status (1=connected, 0=disconnected)`);
|
||||
lines.push(`# TYPE as4_redis_connection_status gauge`);
|
||||
lines.push(`as4_redis_connection_status ${this.metrics.redisConnected}`);
|
||||
|
||||
return lines.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset metrics (for testing)
|
||||
*/
|
||||
reset(): void {
|
||||
this.metrics = {
|
||||
messagesReceived: 0,
|
||||
messagesProcessed: 0,
|
||||
messagesFailed: 0,
|
||||
messageLatencyP50: 0,
|
||||
messageLatencyP99: 0,
|
||||
instructionsReceived: 0,
|
||||
instructionsAccepted: 0,
|
||||
instructionsRejected: 0,
|
||||
instructionsPosted: 0,
|
||||
instructionsQueued: 0,
|
||||
activeMembers: 0,
|
||||
totalMembers: 0,
|
||||
certificatesExpiringSoon: 0,
|
||||
certificatesExpired: 0,
|
||||
databaseConnected: 0,
|
||||
redisConnected: 0,
|
||||
systemUptime: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const as4MetricsService = new As4MetricsService();
|
||||
@@ -24,10 +24,10 @@ class MetricsService {
|
||||
*/
|
||||
recordRequestDuration(path: string, method: string, duration: number): void {
|
||||
const key = `${method}:${path}`;
|
||||
if (!this.metrics.requestDuration.has(key)) {
|
||||
this.metrics.requestDuration.set(key, []);
|
||||
if (!this.behavioral_metrics.requestDuration.has(key)) {
|
||||
this.behavioral_metrics.requestDuration.set(key, []);
|
||||
}
|
||||
this.metrics.requestDuration.get(key)?.push(duration);
|
||||
this.behavioral_metrics.requestDuration.get(key)?.push(duration);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -35,18 +35,18 @@ class MetricsService {
|
||||
*/
|
||||
recordError(path: string, method: string, statusCode: number): void {
|
||||
const key = `${method}:${path}:${statusCode}`;
|
||||
const current = this.metrics.errorCount.get(key) || 0;
|
||||
this.metrics.errorCount.set(key, current + 1);
|
||||
const current = this.behavioral_metrics.errorCount.get(key) || 0;
|
||||
this.behavioral_metrics.errorCount.set(key, current + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Record database query duration
|
||||
*/
|
||||
recordDbQuery(operation: string, duration: number): void {
|
||||
if (!this.metrics.dbQueryDuration.has(operation)) {
|
||||
this.metrics.dbQueryDuration.set(operation, []);
|
||||
if (!this.behavioral_metrics.dbQueryDuration.has(operation)) {
|
||||
this.behavioral_metrics.dbQueryDuration.set(operation, []);
|
||||
}
|
||||
this.metrics.dbQueryDuration.get(operation)?.push(duration);
|
||||
this.behavioral_metrics.dbQueryDuration.get(operation)?.push(duration);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -54,7 +54,7 @@ class MetricsService {
|
||||
*/
|
||||
getAverageRequestDuration(path: string, method: string): number {
|
||||
const key = `${method}:${path}`;
|
||||
const durations = this.metrics.requestDuration.get(key) || [];
|
||||
const durations = this.behavioral_metrics.requestDuration.get(key) || [];
|
||||
if (durations.length === 0) return 0;
|
||||
return durations.reduce((a, b) => a + b, 0) / durations.length;
|
||||
}
|
||||
@@ -64,7 +64,7 @@ class MetricsService {
|
||||
*/
|
||||
getErrorCount(path: string, method: string, statusCode: number): number {
|
||||
const key = `${method}:${path}:${statusCode}`;
|
||||
return this.metrics.errorCount.get(key) || 0;
|
||||
return this.behavioral_metrics.errorCount.get(key) || 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -76,19 +76,19 @@ class MetricsService {
|
||||
dbQueryDurations: Record<string, number>;
|
||||
} {
|
||||
const requestDurations: Record<string, number> = {};
|
||||
for (const [key, durations] of this.metrics.requestDuration.entries()) {
|
||||
for (const [key, durations] of this.behavioral_metrics.requestDuration.entries()) {
|
||||
if (durations.length > 0) {
|
||||
requestDurations[key] = durations.reduce((a, b) => a + b, 0) / durations.length;
|
||||
}
|
||||
}
|
||||
|
||||
const errorCounts: Record<string, number> = {};
|
||||
for (const [key, count] of this.metrics.errorCount.entries()) {
|
||||
for (const [key, count] of this.behavioral_metrics.errorCount.entries()) {
|
||||
errorCounts[key] = count;
|
||||
}
|
||||
|
||||
const dbQueryDurations: Record<string, number> = {};
|
||||
for (const [operation, durations] of this.metrics.dbQueryDuration.entries()) {
|
||||
for (const [operation, durations] of this.behavioral_metrics.dbQueryDuration.entries()) {
|
||||
if (durations.length > 0) {
|
||||
dbQueryDurations[operation] = durations.reduce((a, b) => a + b, 0) / durations.length;
|
||||
}
|
||||
@@ -105,7 +105,7 @@ class MetricsService {
|
||||
* Reset metrics (useful for testing)
|
||||
*/
|
||||
reset(): void {
|
||||
this.metrics = {
|
||||
this.behavioral_metrics = {
|
||||
requestDuration: new Map(),
|
||||
errorCount: new Map(),
|
||||
dbQueryDuration: new Map(),
|
||||
|
||||
72
src/infrastructure/monitoring/solacenet-metrics.ts
Normal file
72
src/infrastructure/monitoring/solacenet-metrics.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
// SolaceNet Metrics Collection
|
||||
// Prometheus metrics for monitoring
|
||||
|
||||
// Note: In production, use a proper Prometheus client library
|
||||
// This is a simplified example
|
||||
|
||||
export class SolaceNetMetrics {
|
||||
private static instance: SolaceNetMetrics;
|
||||
private metrics: Map<string, number> = new Map();
|
||||
|
||||
static getInstance(): SolaceNetMetrics {
|
||||
if (!SolaceNetMetrics.instance) {
|
||||
SolaceNetMetrics.instance = new SolaceNetMetrics();
|
||||
}
|
||||
return SolaceNetMetrics.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment capability toggle counter
|
||||
*/
|
||||
incrementCapabilityToggle(capabilityId: string, action: string): void {
|
||||
const key = `solacenet_capability_toggles_total{capability_id="${capabilityId}",action="${action}"}`;
|
||||
this.behavioral_metrics.set(key, (this.behavioral_metrics.get(key) || 0) + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Record policy decision latency
|
||||
*/
|
||||
recordPolicyDecisionLatency(durationMs: number): void {
|
||||
const key = 'solacenet_policy_decision_duration_seconds';
|
||||
// In production, use histogram
|
||||
this.behavioral_metrics.set(key, durationMs / 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Record risk score
|
||||
*/
|
||||
recordRiskScore(score: number, transactionId?: string): void {
|
||||
const key = transactionId
|
||||
? `solacenet_risk_score{transaction_id="${transactionId}"}`
|
||||
: 'solacenet_risk_score';
|
||||
this.behavioral_metrics.set(key, score);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment kill switch activations
|
||||
*/
|
||||
incrementKillSwitch(capabilityId: string): void {
|
||||
const key = `solacenet_kill_switch_activations_total{capability_id="${capabilityId}"}`;
|
||||
this.behavioral_metrics.set(key, (this.behavioral_metrics.get(key) || 0) + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get metrics in Prometheus format
|
||||
*/
|
||||
getMetrics(): string {
|
||||
const lines: string[] = [];
|
||||
for (const [key, value] of this.behavioral_metrics.entries()) {
|
||||
lines.push(`${key} ${value}`);
|
||||
}
|
||||
return lines.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset metrics (for testing)
|
||||
*/
|
||||
reset(): void {
|
||||
this.behavioral_metrics.clear();
|
||||
}
|
||||
}
|
||||
|
||||
export const solacenetMetrics = SolaceNetMetrics.getInstance();
|
||||
52
src/infrastructure/monitoring/tracing.middleware.ts
Normal file
52
src/infrastructure/monitoring/tracing.middleware.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
// Tracing Middleware
|
||||
// Adds distributed tracing to Express requests
|
||||
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { tracingService } from './tracing.service';
|
||||
|
||||
export interface TracedRequest extends Request {
|
||||
traceContext?: {
|
||||
traceId: string;
|
||||
spanId: string;
|
||||
parentSpanId?: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Tracing middleware
|
||||
* Creates a span for each HTTP request
|
||||
*/
|
||||
export function tracingMiddleware(req: TracedRequest, res: Response, next: NextFunction): void {
|
||||
// Extract trace context from headers
|
||||
const parentContext = tracingService.extractTraceContext(req.headers);
|
||||
|
||||
// Start new span for this request
|
||||
const context = tracingService.startSpan(
|
||||
`${req.method} ${req.path}`,
|
||||
parentContext?.traceId,
|
||||
parentContext?.spanId,
|
||||
{
|
||||
'http.method': req.method,
|
||||
'http.path': req.path,
|
||||
'http.url': req.url,
|
||||
'http.user_agent': req.get('user-agent'),
|
||||
'http.remote_ip': req.ip,
|
||||
}
|
||||
);
|
||||
|
||||
req.traceContext = context;
|
||||
|
||||
// Inject trace context into response headers
|
||||
const traceHeaders = tracingService.injectTraceContext(context);
|
||||
Object.entries(traceHeaders).forEach(([key, value]) => {
|
||||
res.setHeader(key, value);
|
||||
});
|
||||
|
||||
// End span when response finishes
|
||||
res.on('finish', () => {
|
||||
const status = res.statusCode >= 400 ? 'error' : 'ok';
|
||||
tracingService.endSpan(context.traceId, context.spanId, status);
|
||||
});
|
||||
|
||||
next();
|
||||
}
|
||||
308
src/infrastructure/monitoring/tracing.service.ts
Normal file
308
src/infrastructure/monitoring/tracing.service.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
// Distributed Tracing Service
|
||||
// OpenTelemetry integration for request correlation and distributed tracing
|
||||
|
||||
import { logger } from '@/infrastructure/monitoring/logger';
|
||||
|
||||
/**
 * Correlation identifiers for a single span, propagated across services
 * via HTTP headers.
 */
export interface TraceContext {
  traceId: string;                   // 32-hex-char trace id (W3C format)
  spanId: string;                    // 16-hex-char span id
  parentSpanId?: string;             // id of the causing span, when known
  baggage?: Record<string, string>;  // optional key/value context carried along
}

/**
 * In-memory representation of one unit of traced work.
 */
export interface Span {
  name: string;                      // human-readable operation name
  startTime: number;                 // epoch millis when the span began
  endTime?: number;                  // epoch millis when ended; unset while live
  attributes: Record<string, any>;   // arbitrary key/value metadata
  events: Array<{                    // timestamped events recorded during the span
    name: string;
    timestamp: number;               // epoch millis
    attributes?: Record<string, any>;
  }>;
  status: 'ok' | 'error';
  error?: Error;                     // populated when endSpan is given an error
  parentSpanId?: string;             // id of the parent span, when known
}
|
||||
|
||||
/**
|
||||
* Tracing Service
|
||||
* Provides distributed tracing capabilities using OpenTelemetry patterns
|
||||
*/
|
||||
export class TracingService {
|
||||
private spans: Map<string, Span> = new Map();
|
||||
private activeSpans: Map<string, string> = new Map(); // traceId -> spanId
|
||||
|
||||
/**
|
||||
* Start a new span
|
||||
*/
|
||||
startSpan(
|
||||
name: string,
|
||||
traceId?: string,
|
||||
parentSpanId?: string,
|
||||
attributes?: Record<string, any>
|
||||
): TraceContext {
|
||||
const generatedTraceId = traceId || this.generateTraceId();
|
||||
const spanId = this.generateSpanId();
|
||||
|
||||
const span: Span = {
|
||||
name,
|
||||
startTime: Date.now(),
|
||||
attributes: {
|
||||
...attributes,
|
||||
'span.name': name,
|
||||
'span.kind': 'internal',
|
||||
},
|
||||
events: [],
|
||||
status: 'ok',
|
||||
};
|
||||
|
||||
const spanKey = `${generatedTraceId}:${spanId}`;
|
||||
this.spans.set(spanKey, span);
|
||||
this.activeSpans.set(generatedTraceId, spanId);
|
||||
|
||||
logger.debug('Span started', {
|
||||
traceId: generatedTraceId,
|
||||
spanId,
|
||||
name,
|
||||
parentSpanId,
|
||||
});
|
||||
|
||||
return {
|
||||
traceId: generatedTraceId,
|
||||
spanId,
|
||||
parentSpanId,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* End a span
|
||||
*/
|
||||
endSpan(traceId: string, spanId: string, status: 'ok' | 'error' = 'ok', error?: Error): void {
|
||||
const spanKey = `${traceId}:${spanId}`;
|
||||
const span = this.spans.get(spanKey);
|
||||
|
||||
if (!span) {
|
||||
logger.warn('Span not found', { traceId, spanId });
|
||||
return;
|
||||
}
|
||||
|
||||
span.endTime = Date.now();
|
||||
span.status = status;
|
||||
if (error) {
|
||||
span.error = error;
|
||||
span.attributes['error'] = true;
|
||||
span.attributes['error.message'] = error.message;
|
||||
span.attributes['error.stack'] = error.stack;
|
||||
}
|
||||
|
||||
// Calculate duration
|
||||
const duration = span.endTime - span.startTime;
|
||||
span.attributes['duration.ms'] = duration;
|
||||
|
||||
// Export span (in production, send to OpenTelemetry collector)
|
||||
this.exportSpan(traceId, spanId, span);
|
||||
|
||||
this.activeSpans.delete(traceId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add event to span
|
||||
*/
|
||||
addEvent(traceId: string, spanId: string, name: string, attributes?: Record<string, any>): void {
|
||||
const spanKey = `${traceId}:${spanId}`;
|
||||
const span = this.spans.get(spanKey);
|
||||
|
||||
if (!span) {
|
||||
logger.warn('Span not found for event', { traceId, spanId });
|
||||
return;
|
||||
}
|
||||
|
||||
span.events.push({
|
||||
name,
|
||||
timestamp: Date.now(),
|
||||
attributes,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add attribute to span
|
||||
*/
|
||||
setAttribute(traceId: string, spanId: string, key: string, value: any): void {
|
||||
const spanKey = `${traceId}:${spanId}`;
|
||||
const span = this.spans.get(spanKey);
|
||||
|
||||
if (!span) {
|
||||
logger.warn('Span not found for attribute', { traceId, spanId });
|
||||
return;
|
||||
}
|
||||
|
||||
span.attributes[key] = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get trace context from headers
|
||||
*/
|
||||
extractTraceContext(headers: Record<string, string | string[] | undefined>): TraceContext | null {
|
||||
const traceId = this.getHeader(headers, 'x-trace-id') || this.getHeader(headers, 'traceparent');
|
||||
const spanId = this.getHeader(headers, 'x-span-id');
|
||||
const parentSpanId = this.getHeader(headers, 'x-parent-span-id');
|
||||
|
||||
if (!traceId) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Parse W3C Trace Context format if present
|
||||
if (traceId.startsWith('00-')) {
|
||||
const parts = traceId.split('-');
|
||||
return {
|
||||
traceId: parts[1] || traceId,
|
||||
spanId: parts[2] || spanId || this.generateSpanId(),
|
||||
parentSpanId: parts[3] || parentSpanId || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
traceId,
|
||||
spanId: spanId || this.generateSpanId(),
|
||||
parentSpanId: parentSpanId || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Inject trace context into headers
|
||||
*/
|
||||
injectTraceContext(context: TraceContext): Record<string, string> {
|
||||
return {
|
||||
'x-trace-id': context.traceId,
|
||||
'x-span-id': context.spanId,
|
||||
'x-parent-span-id': context.parentSpanId || '',
|
||||
// W3C Trace Context format
|
||||
traceparent: `00-${context.traceId}-${context.spanId}-01`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Export span (in production, send to OpenTelemetry collector)
|
||||
*/
|
||||
private async exportSpan(traceId: string, spanId: string, span: Span): Promise<void> {
|
||||
// In production, this would send to OpenTelemetry collector
|
||||
// For now, log structured trace data
|
||||
logger.info('Span exported', {
|
||||
traceId,
|
||||
spanId,
|
||||
name: span.name,
|
||||
duration: span.endTime! - span.startTime,
|
||||
status: span.status,
|
||||
attributes: span.attributes,
|
||||
events: span.events.length,
|
||||
});
|
||||
|
||||
// Send to OpenTelemetry collector if configured
|
||||
const collectorUrl = process.env.OTEL_COLLECTOR_URL;
|
||||
if (collectorUrl) {
|
||||
try {
|
||||
const otelSpan = this.formatSpanForOTel(traceId, spanId, span);
|
||||
await fetch(`${collectorUrl}/v1/traces`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
resourceSpans: [
|
||||
{
|
||||
resource: {
|
||||
attributes: [
|
||||
{ key: 'service.name', value: { stringValue: 'dbis-iru' } },
|
||||
{ key: 'service.version', value: { stringValue: process.env.APP_VERSION || '1.0.0' } },
|
||||
],
|
||||
},
|
||||
scopeSpans: [
|
||||
{
|
||||
spans: [otelSpan],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}),
|
||||
}).catch((error) => {
|
||||
logger.warn('Failed to send span to OpenTelemetry collector', {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
logger.warn('OpenTelemetry export failed', {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Format span for OpenTelemetry
|
||||
*/
|
||||
private formatSpanForOTel(traceId: string, spanId: string, span: Span): Record<string, unknown> {
|
||||
return {
|
||||
traceId: this.hexToBytes(traceId),
|
||||
spanId: this.hexToBytes(spanId),
|
||||
parentSpanId: span.parentSpanId ? this.hexToBytes(span.parentSpanId) : undefined,
|
||||
name: span.name,
|
||||
kind: 1, // INTERNAL
|
||||
startTimeUnixNano: span.startTime * 1000000,
|
||||
endTimeUnixNano: span.endTime ? span.endTime * 1000000 : undefined,
|
||||
attributes: Object.entries(span.attributes).map(([key, value]) => ({
|
||||
key,
|
||||
value: { stringValue: String(value) },
|
||||
})),
|
||||
events: span.events.map((event) => ({
|
||||
timeUnixNano: event.timestamp * 1000000,
|
||||
name: event.name,
|
||||
attributes: event.attributes
|
||||
? Object.entries(event.attributes).map(([key, value]) => ({
|
||||
key,
|
||||
value: { stringValue: String(value) },
|
||||
}))
|
||||
: [],
|
||||
})),
|
||||
status: {
|
||||
code: span.status === 'ok' ? 1 : 2, // OK = 1, ERROR = 2
|
||||
message: span.error?.message,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert hex string to bytes
|
||||
*/
|
||||
private hexToBytes(hex: string): string {
|
||||
// Convert hex string to base64 for OTel
|
||||
return Buffer.from(hex, 'hex').toString('base64');
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate trace ID (W3C format)
|
||||
*/
|
||||
private generateTraceId(): string {
|
||||
// Generate 32-character hex string (16 bytes)
|
||||
return Array.from({ length: 32 }, () => Math.floor(Math.random() * 16).toString(16)).join('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate span ID (W3C format)
|
||||
*/
|
||||
private generateSpanId(): string {
|
||||
// Generate 16-character hex string (8 bytes)
|
||||
return Array.from({ length: 16 }, () => Math.floor(Math.random() * 16).toString(16)).join('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get header value
|
||||
*/
|
||||
private getHeader(headers: Record<string, string | string[] | undefined>, name: string): string | null {
|
||||
const value = headers[name.toLowerCase()];
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
return Array.isArray(value) ? value[0] : value;
|
||||
}
|
||||
}
|
||||
|
||||
export const tracingService = new TracingService();
|
||||
149
src/infrastructure/proxmox/proxmox-network.service.ts
Normal file
149
src/infrastructure/proxmox/proxmox-network.service.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
// Proxmox Network Management Service
|
||||
// Advanced network management for Proxmox VE
|
||||
|
||||
import { proxmoxVEIntegration } from './proxmox-ve-integration.service';
|
||||
import { logger } from '@/infrastructure/monitoring/logger';
|
||||
import { retryWithBackoff } from '@/shared/utils/retry';
|
||||
|
||||
/**
 * Desired configuration for a Proxmox network attachment.
 */
export interface NetworkConfig {
  bridge: string;   // bridge name (e.g. vmbr0)
  vlan?: number;    // optional VLAN tag
  subnet: string;   // CIDR notation, e.g. "10.0.0.0/24" (split on '/')
  gateway: string;
  dns?: string[];
  mtu?: number;
}

/**
 * Result of assigning a network and IP address to a container.
 */
export interface NetworkAllocation {
  containerId: string;
  vmid: number;           // Proxmox numeric container id
  network: NetworkConfig;
  ipAddress: string;      // host address generated from the subnet
}

/**
 * Snapshot of bridge-level interface status and traffic counters.
 */
export interface NetworkStats {
  bridge: string;
  interfaces: Array<{
    name: string;
    status: 'up' | 'down';
    speed?: string;
    mtu?: number;
  }>;
  traffic: {
    rx: number; // bytes received
    tx: number; // bytes transmitted
  };
}
|
||||
|
||||
export class ProxmoxNetworkService {
|
||||
/**
|
||||
* Create network bridge
|
||||
*/
|
||||
async createBridge(name: string, config: NetworkConfig): Promise<void> {
|
||||
// Proxmox bridge creation via API
|
||||
logger.info('Creating network bridge', {
|
||||
name,
|
||||
config,
|
||||
});
|
||||
|
||||
// In production, this would use Proxmox API to create bridge
|
||||
// For now, log the action
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure VLAN
|
||||
*/
|
||||
async configureVLAN(bridge: string, vlan: number): Promise<void> {
|
||||
logger.info('Configuring VLAN', {
|
||||
bridge,
|
||||
vlan,
|
||||
});
|
||||
|
||||
// In production, this would configure VLAN on Proxmox bridge
|
||||
}
|
||||
|
||||
/**
|
||||
* Allocate network for container
|
||||
*/
|
||||
async allocateNetwork(containerId: string, vmid: number, config: NetworkConfig): Promise<NetworkAllocation> {
|
||||
// Generate IP address from subnet
|
||||
const ipAddress = this.generateIPFromSubnet(config.subnet);
|
||||
|
||||
logger.info('Network allocated for container', {
|
||||
containerId,
|
||||
vmid,
|
||||
ipAddress,
|
||||
config,
|
||||
});
|
||||
|
||||
return {
|
||||
containerId,
|
||||
vmid,
|
||||
network: config,
|
||||
ipAddress,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get network statistics
|
||||
*/
|
||||
async getNetworkStats(bridge: string): Promise<NetworkStats> {
|
||||
// In production, query Proxmox for network statistics
|
||||
return {
|
||||
bridge,
|
||||
interfaces: [],
|
||||
traffic: {
|
||||
rx: 0,
|
||||
tx: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate IP address from subnet
|
||||
*/
|
||||
private generateIPFromSubnet(subnet: string): string {
|
||||
// Simple IP generation (in production, use IPAM service)
|
||||
const [network, prefix] = subnet.split('/');
|
||||
const networkParts = network.split('.').map(Number);
|
||||
|
||||
// Generate random host IP
|
||||
const host = Math.floor(Math.random() * 254) + 1;
|
||||
return `${networkParts[0]}.${networkParts[1]}.${networkParts[2]}.${host}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure network QoS
|
||||
*/
|
||||
async configureQoS(bridge: string, limits: {
|
||||
bandwidth?: number; // Mbps
|
||||
latency?: number; // ms
|
||||
}): Promise<void> {
|
||||
logger.info('Configuring network QoS', {
|
||||
bridge,
|
||||
limits,
|
||||
});
|
||||
|
||||
// In production, configure QoS on Proxmox bridge
|
||||
}
|
||||
|
||||
/**
|
||||
* Monitor network health
|
||||
*/
|
||||
async monitorNetworkHealth(bridge: string): Promise<{
|
||||
healthy: boolean;
|
||||
latency: number;
|
||||
packetLoss: number;
|
||||
bandwidth: number;
|
||||
}> {
|
||||
// In production, monitor network health
|
||||
return {
|
||||
healthy: true,
|
||||
latency: 0,
|
||||
packetLoss: 0,
|
||||
bandwidth: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const proxmoxNetworkService = new ProxmoxNetworkService();
|
||||
308
src/infrastructure/proxmox/proxmox-ve-integration.service.ts
Normal file
308
src/infrastructure/proxmox/proxmox-ve-integration.service.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
// Proxmox VE Integration Service
|
||||
// Integration with Proxmox VE API for container deployment
|
||||
|
||||
import { retryWithBackoff } from '@/shared/utils/retry';
|
||||
import { logger } from '@/infrastructure/monitoring/logger';
|
||||
import { proxmoxCircuitBreaker } from '@/shared/utils/circuit-breaker';
|
||||
|
||||
/**
 * Connection settings for a Proxmox VE API endpoint.
 */
export interface ProxmoxConfig {
  host: string;
  port: number;       // API port (8006 default per the service constructor)
  username: string;
  password: string;
  realm?: string;     // auth realm; defaults to 'pam'
}

/**
 * Specification for one LXC container to create.
 */
export interface ContainerSpec {
  vmid: number;       // numeric container id
  hostname: string;
  cores: number;
  memory: number;     // forwarded verbatim to PVE — presumably MiB; confirm
  disk: number;       // rootfs size, forwarded as `local-lvm:<disk>`
  network: {
    ip: string;       // address only; the service appends a /24 prefix
    gateway: string;
    vlan?: number;    // optional VLAN tag applied to net0
  };
  template: string;   // container template identifier
  config: any;        // extra PVE options merged over the generated config
}

/**
 * Aggregate outcome of a multi-container deployment.
 */
export interface DeploymentResult {
  success: boolean;   // true only when every container deployed cleanly
  containers: Array<{
    name: string;     // hostname of the container
    vmid: number;
    status: string;
    ip?: string;
  }>;
  errors?: string[];  // per-container failure messages, when any
}
|
||||
|
||||
export class ProxmoxVEIntegration {
|
||||
private config: ProxmoxConfig;
|
||||
private apiBaseUrl: string;
|
||||
private token?: string;
|
||||
private tokenExpiry?: Date;
|
||||
|
||||
constructor(config: ProxmoxConfig) {
|
||||
this.config = {
|
||||
realm: 'pam',
|
||||
...config,
|
||||
};
|
||||
this.apiBaseUrl = `https://${config.host}:${config.port || 8006}/api2/json`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate with Proxmox VE
|
||||
*/
|
||||
async authenticate(): Promise<void> {
|
||||
await proxmoxCircuitBreaker.execute(async () => {
|
||||
return await retryWithBackoff(
|
||||
async () => {
|
||||
const response = await fetch(`${this.apiBaseUrl}/access/ticket`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
username: `${this.config.username}@${this.config.realm}`,
|
||||
password: this.config.password,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Authentication failed: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
if (data.data?.ticket) {
|
||||
this.token = data.data.ticket;
|
||||
this.tokenExpiry = new Date(Date.now() + 3600000); // 1 hour
|
||||
logger.info('Proxmox VE authentication successful', {
|
||||
host: this.config.host,
|
||||
username: this.config.username,
|
||||
});
|
||||
} else {
|
||||
throw new Error('Invalid authentication response');
|
||||
}
|
||||
},
|
||||
{
|
||||
maxRetries: 3,
|
||||
initialDelayMs: 1000,
|
||||
onRetry: (attempt, error) => {
|
||||
logger.warn('Proxmox authentication retry', {
|
||||
attempt,
|
||||
error: error.message,
|
||||
});
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create LXC container
|
||||
*/
|
||||
async createContainer(spec: ContainerSpec, node: string = 'pve'): Promise<{ vmid: number; status: string }> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
const containerConfig: any = {
|
||||
vmid: spec.vmid,
|
||||
hostname: spec.hostname,
|
||||
cores: spec.cores,
|
||||
memory: spec.memory,
|
||||
net0: `name=eth0,bridge=vmbr0,ip=${spec.network.ip}/24,gw=${spec.network.gateway}`,
|
||||
rootfs: `local-lvm:${spec.disk}`,
|
||||
ostype: 'ubuntu',
|
||||
template: spec.template,
|
||||
...spec.config,
|
||||
};
|
||||
|
||||
if (spec.network.vlan) {
|
||||
containerConfig.net0 = `name=eth0,bridge=vmbr0,ip=${spec.network.ip}/24,gw=${spec.network.gateway},tag=${spec.network.vlan}`;
|
||||
}
|
||||
|
||||
const response = await this.proxmoxRequest('POST', `/nodes/${node}/lxc`, containerConfig);
|
||||
|
||||
return {
|
||||
vmid: spec.vmid,
|
||||
status: 'created',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure container network
|
||||
*/
|
||||
async configureContainerNetwork(vmid: number, network: ContainerSpec['network'], node: string = 'pve'): Promise<void> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
const networkConfig: any = {
|
||||
net0: `name=eth0,bridge=vmbr0,ip=${network.ip}/24,gw=${network.gateway}`,
|
||||
};
|
||||
|
||||
if (network.vlan) {
|
||||
networkConfig.net0 = `name=eth0,bridge=vmbr0,ip=${network.ip}/24,gw=${network.gateway},tag=${network.vlan}`;
|
||||
}
|
||||
|
||||
await this.proxmoxRequest('PUT', `/nodes/${node}/lxc/${vmid}/config`, networkConfig);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start container
|
||||
*/
|
||||
async startContainer(vmid: number, node: string = 'pve'): Promise<void> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
await this.proxmoxRequest('POST', `/nodes/${node}/lxc/${vmid}/status/start`, {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get container status
|
||||
*/
|
||||
async getContainerStatus(vmid: number, node: string = 'pve'): Promise<{ status: string; ip?: string }> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
const response = await this.proxmoxRequest('GET', `/nodes/${node}/lxc/${vmid}/status/current`, null);
|
||||
|
||||
const status = response.data?.status || 'unknown';
|
||||
const ip = response.data?.net0?.ip || undefined;
|
||||
|
||||
return {
|
||||
status,
|
||||
ip,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Deploy IRU containers
|
||||
*/
|
||||
async deployIRUContainers(
|
||||
containers: ContainerSpec[],
|
||||
node?: string
|
||||
): Promise<DeploymentResult> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
const results: DeploymentResult['containers'] = [];
|
||||
const errors: string[] = [];
|
||||
|
||||
for (const container of containers) {
|
||||
try {
|
||||
// Create container
|
||||
const created = await this.createContainer(container);
|
||||
|
||||
// Configure network
|
||||
await this.configureContainerNetwork(created.vmid, container.network);
|
||||
|
||||
// Start container
|
||||
await this.startContainer(created.vmid);
|
||||
|
||||
// Get status
|
||||
const status = await this.getContainerStatus(created.vmid);
|
||||
|
||||
results.push({
|
||||
name: container.hostname,
|
||||
vmid: created.vmid,
|
||||
status: status.status,
|
||||
ip: status.ip,
|
||||
});
|
||||
} catch (error) {
|
||||
errors.push(`Failed to deploy ${container.hostname}: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: errors.length === 0,
|
||||
containers: results,
|
||||
errors: errors.length > 0 ? errors : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure authenticated
|
||||
*/
|
||||
private async ensureAuthenticated(): Promise<void> {
|
||||
if (!this.token || !this.tokenExpiry || this.tokenExpiry < new Date()) {
|
||||
await this.authenticate();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Make Proxmox VE API request with retry
|
||||
*/
|
||||
private async proxmoxRequest(method: string, path: string, body: any): Promise<any> {
|
||||
await this.ensureAuthenticated();
|
||||
|
||||
return await proxmoxCircuitBreaker.execute(async () => {
|
||||
return await retryWithBackoff(
|
||||
async () => {
|
||||
const url = `${this.apiBaseUrl}${path}`;
|
||||
const options: RequestInit = {
|
||||
method,
|
||||
headers: {
|
||||
'Cookie': `PVEAuthCookie=${this.token}`,
|
||||
},
|
||||
};
|
||||
|
||||
if (body && method !== 'GET') {
|
||||
options.headers = {
|
||||
...options.headers,
|
||||
'Content-Type': 'application/json',
|
||||
};
|
||||
options.body = JSON.stringify(body);
|
||||
}
|
||||
|
||||
const response = await fetch(url, options);
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`Proxmox API error: ${response.status} ${response.statusText} - ${errorText}`);
|
||||
}
|
||||
|
||||
return await response.json();
|
||||
},
|
||||
{
|
||||
maxRetries: 3,
|
||||
initialDelayMs: 1000,
|
||||
onRetry: (attempt, error) => {
|
||||
logger.warn('Proxmox API request retry', {
|
||||
method,
|
||||
path,
|
||||
attempt,
|
||||
error: error.message,
|
||||
});
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Validate required Proxmox environment variables
|
||||
function getProxmoxConfig(): ProxmoxConfig {
|
||||
const host = process.env.PROXMOX_HOST;
|
||||
const username = process.env.PROXMOX_USERNAME;
|
||||
const password = process.env.PROXMOX_PASSWORD;
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (!host) {
|
||||
throw new Error('PROXMOX_HOST environment variable is required in production');
|
||||
}
|
||||
if (!username) {
|
||||
throw new Error('PROXMOX_USERNAME environment variable is required in production');
|
||||
}
|
||||
if (!password) {
|
||||
throw new Error('PROXMOX_PASSWORD environment variable is required in production');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
host: host || 'localhost',
|
||||
port: parseInt(process.env.PROXMOX_PORT || '8006'),
|
||||
username: username || 'root',
|
||||
password: password || '',
|
||||
realm: process.env.PROXMOX_REALM || 'pam',
|
||||
};
|
||||
}
|
||||
|
||||
export const proxmoxVEIntegration = new ProxmoxVEIntegration(getProxmoxConfig());
|
||||
@@ -51,12 +51,12 @@ export class MigrationRoadmapService {
|
||||
];
|
||||
|
||||
for (const phaseData of phases) {
|
||||
const existing = await prisma.quantumMigrationPhase.findFirst({
|
||||
const existing = await prisma.quantum_migration_phases.findFirst({
|
||||
where: { phaseNumber: phaseData.phaseNumber },
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
await prisma.quantumMigrationPhase.create({
|
||||
await prisma.quantum_migration_phases.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
phaseNumber: phaseData.phaseNumber,
|
||||
@@ -74,7 +74,7 @@ export class MigrationRoadmapService {
|
||||
* Get phase by number
|
||||
*/
|
||||
async getPhase(phaseNumber: number) {
|
||||
return await prisma.quantumMigrationPhase.findFirst({
|
||||
return await prisma.quantum_migration_phases.findFirst({
|
||||
where: { phaseNumber },
|
||||
include: { migrations: true },
|
||||
});
|
||||
@@ -84,7 +84,7 @@ export class MigrationRoadmapService {
|
||||
* Get all phases
|
||||
*/
|
||||
async getAllPhases() {
|
||||
return await prisma.quantumMigrationPhase.findMany({
|
||||
return await prisma.quantum_migration_phases.findMany({
|
||||
include: { migrations: true },
|
||||
orderBy: { phaseNumber: 'asc' },
|
||||
});
|
||||
@@ -94,7 +94,7 @@ export class MigrationRoadmapService {
|
||||
* Start phase
|
||||
*/
|
||||
async startPhase(phaseNumber: number) {
|
||||
const phase = await prisma.quantumMigrationPhase.findFirst({
|
||||
const phase = await prisma.quantum_migration_phases.findFirst({
|
||||
where: { phaseNumber },
|
||||
});
|
||||
|
||||
@@ -102,7 +102,7 @@ export class MigrationRoadmapService {
|
||||
throw new Error(`Phase ${phaseNumber} not found`);
|
||||
}
|
||||
|
||||
return await prisma.quantumMigrationPhase.update({
|
||||
return await prisma.quantum_migration_phases.update({
|
||||
where: { id: phase.id },
|
||||
data: {
|
||||
status: 'in_progress',
|
||||
@@ -115,7 +115,7 @@ export class MigrationRoadmapService {
|
||||
* Complete phase
|
||||
*/
|
||||
async completePhase(phaseNumber: number) {
|
||||
const phase = await prisma.quantumMigrationPhase.findFirst({
|
||||
const phase = await prisma.quantum_migration_phases.findFirst({
|
||||
where: { phaseNumber },
|
||||
include: { migrations: true },
|
||||
});
|
||||
@@ -135,7 +135,7 @@ export class MigrationRoadmapService {
|
||||
);
|
||||
}
|
||||
|
||||
return await prisma.quantumMigrationPhase.update({
|
||||
return await prisma.quantum_migration_phases.update({
|
||||
where: { id: phase.id },
|
||||
data: {
|
||||
status: 'completed',
|
||||
@@ -148,7 +148,7 @@ export class MigrationRoadmapService {
|
||||
* Register component migration
|
||||
*/
|
||||
async registerMigration(data: MigrationStatus) {
|
||||
const phase = await prisma.quantumMigrationPhase.findUnique({
|
||||
const phase = await prisma.quantum_migration_phases.findUnique({
|
||||
where: { id: data.phaseId },
|
||||
});
|
||||
|
||||
@@ -156,7 +156,7 @@ export class MigrationRoadmapService {
|
||||
throw new Error(`Phase not found: ${data.phaseId}`);
|
||||
}
|
||||
|
||||
const migration = await prisma.migrationAudit.create({
|
||||
const migration = await prisma.migration_audits.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
phaseId: data.phaseId,
|
||||
@@ -195,7 +195,7 @@ export class MigrationRoadmapService {
|
||||
updateData.migrationDate = new Date();
|
||||
}
|
||||
|
||||
return await prisma.migrationAudit.update({
|
||||
return await prisma.migration_audits.update({
|
||||
where: { id: migrationId },
|
||||
data: updateData,
|
||||
});
|
||||
@@ -205,7 +205,7 @@ export class MigrationRoadmapService {
|
||||
* Get migration status for component
|
||||
*/
|
||||
async getComponentMigrationStatus(componentType: string, componentId: string) {
|
||||
return await prisma.migrationAudit.findMany({
|
||||
return await prisma.migration_audits.findMany({
|
||||
where: {
|
||||
componentType,
|
||||
componentId,
|
||||
@@ -227,7 +227,7 @@ export class MigrationRoadmapService {
|
||||
0
|
||||
);
|
||||
|
||||
const allMigrations = await prisma.migrationAudit.findMany();
|
||||
const allMigrations = await prisma.migration_audits.findMany();
|
||||
const completedMigrations = allMigrations.filter(
|
||||
(m) => m.migrationStatus === 'completed'
|
||||
).length;
|
||||
|
||||
@@ -31,7 +31,7 @@ export class PQCKeyManagerService {
|
||||
* Get key by ID
|
||||
*/
|
||||
async getKey(keyId: string) {
|
||||
return await prisma.cryptographicKey.findUnique({
|
||||
return await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId },
|
||||
});
|
||||
}
|
||||
@@ -40,7 +40,7 @@ export class PQCKeyManagerService {
|
||||
* Get keys by purpose
|
||||
*/
|
||||
async getKeysByPurpose(keyPurpose: string) {
|
||||
return await prisma.cryptographicKey.findMany({
|
||||
return await prisma.cryptographic_keys.findMany({
|
||||
where: {
|
||||
keyPurpose,
|
||||
status: 'active',
|
||||
@@ -53,7 +53,7 @@ export class PQCKeyManagerService {
|
||||
* Get keys by type
|
||||
*/
|
||||
async getKeysByType(keyType: string) {
|
||||
return await prisma.cryptographicKey.findMany({
|
||||
return await prisma.cryptographic_keys.findMany({
|
||||
where: {
|
||||
keyType,
|
||||
status: 'active',
|
||||
@@ -66,7 +66,7 @@ export class PQCKeyManagerService {
|
||||
* Rotate key
|
||||
*/
|
||||
async rotateKey(keyId: string, newKeyPurpose?: string) {
|
||||
const oldKey = await prisma.cryptographicKey.findUnique({
|
||||
const oldKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId },
|
||||
});
|
||||
|
||||
@@ -91,7 +91,7 @@ export class PQCKeyManagerService {
|
||||
);
|
||||
|
||||
// Mark old key as rotated
|
||||
await prisma.cryptographicKey.update({
|
||||
await prisma.cryptographic_keys.update({
|
||||
where: { keyId },
|
||||
data: {
|
||||
status: 'rotated',
|
||||
@@ -106,7 +106,7 @@ export class PQCKeyManagerService {
|
||||
* Revoke key
|
||||
*/
|
||||
async revokeKey(keyId: string, reason?: string) {
|
||||
return await prisma.cryptographicKey.update({
|
||||
return await prisma.cryptographic_keys.update({
|
||||
where: { keyId },
|
||||
data: {
|
||||
status: 'revoked',
|
||||
@@ -118,7 +118,7 @@ export class PQCKeyManagerService {
|
||||
* Check key expiration
|
||||
*/
|
||||
async checkKeyExpiration(keyId: string): Promise<boolean> {
|
||||
const key = await prisma.cryptographicKey.findUnique({
|
||||
const key = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId },
|
||||
});
|
||||
|
||||
@@ -136,7 +136,7 @@ export class PQCKeyManagerService {
|
||||
const thresholdDate = new Date();
|
||||
thresholdDate.setDate(thresholdDate.getDate() + daysAhead);
|
||||
|
||||
return await prisma.cryptographicKey.findMany({
|
||||
return await prisma.cryptographic_keys.findMany({
|
||||
where: {
|
||||
status: 'active',
|
||||
expiresAt: {
|
||||
@@ -155,7 +155,7 @@ export class PQCKeyManagerService {
|
||||
sovereignIdentityId: string,
|
||||
quantumKeyId: string
|
||||
) {
|
||||
const key = await prisma.cryptographicKey.findUnique({
|
||||
const key = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: quantumKeyId },
|
||||
});
|
||||
|
||||
@@ -167,7 +167,7 @@ export class PQCKeyManagerService {
|
||||
throw new Error(`Key ${quantumKeyId} is not a quantum-safe key`);
|
||||
}
|
||||
|
||||
return await prisma.sovereignIdentity.update({
|
||||
return await prisma.sovereign_identities.update({
|
||||
where: { id: sovereignIdentityId },
|
||||
data: {
|
||||
quantumKeyId,
|
||||
@@ -180,13 +180,13 @@ export class PQCKeyManagerService {
|
||||
* Get quantum-enabled identities
|
||||
*/
|
||||
async getQuantumEnabledIdentities() {
|
||||
return await prisma.sovereignIdentity.findMany({
|
||||
return await prisma.sovereign_identities.findMany({
|
||||
where: {
|
||||
isQuantumEnabled: true,
|
||||
status: 'active',
|
||||
},
|
||||
include: {
|
||||
sovereignBank: true,
|
||||
sovereign_banks: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -225,7 +225,7 @@ export class QuantumCompatibilityService {
|
||||
): Promise<string> {
|
||||
const mappingId = `QPS-MAP-${uuidv4()}`;
|
||||
|
||||
await prisma.legacyProtocolMapping.create({
|
||||
await prisma.legacy_protocol_mappings.create({
|
||||
data: {
|
||||
mappingId,
|
||||
legacyProtocol,
|
||||
@@ -246,7 +246,7 @@ export class QuantumCompatibilityService {
|
||||
* Get protocol mapping
|
||||
*/
|
||||
async getProtocolMapping(legacyProtocol: string) {
|
||||
return await prisma.legacyProtocolMapping.findFirst({
|
||||
return await prisma.legacy_protocol_mappings.findFirst({
|
||||
where: { legacyProtocol, status: 'active' },
|
||||
});
|
||||
}
|
||||
|
||||
@@ -47,8 +47,9 @@ export class QuantumEnvelopeService {
|
||||
await this.createDimensionalHarmonizationHash(request.transactionData);
|
||||
|
||||
// Step 4: Create envelope record
|
||||
const envelope = await prisma.quantumEnvelope.create({
|
||||
const envelope = await prisma.quantum_envelopes.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
envelopeId,
|
||||
legacyTransactionId: request.legacyTransactionId,
|
||||
legacyProtocol: request.legacyProtocol,
|
||||
@@ -57,6 +58,8 @@ export class QuantumEnvelopeService {
|
||||
dimensionalHarmonizationHash,
|
||||
transactionData: request.transactionData as any,
|
||||
status: 'created',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -125,7 +128,7 @@ export class QuantumEnvelopeService {
|
||||
private async getPreviousTransactionHash(
|
||||
sourceBankId: string
|
||||
): Promise<string | null> {
|
||||
const lastTransaction = await prisma.quantumProxyTransaction.findFirst({
|
||||
const lastTransaction = await prisma.quantum_proxy_transactions.findFirst({
|
||||
where: { sourceBankId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
});
|
||||
@@ -138,7 +141,7 @@ export class QuantumEnvelopeService {
|
||||
*/
|
||||
private async getCausalOrder(transactionData: any): Promise<number> {
|
||||
// Get count of transactions from source bank
|
||||
const count = await prisma.quantumProxyTransaction.count({
|
||||
const count = await prisma.quantum_proxy_transactions.count({
|
||||
where: { sourceBankId: transactionData.sourceBankId },
|
||||
});
|
||||
|
||||
@@ -182,7 +185,7 @@ export class QuantumEnvelopeService {
|
||||
* Get envelope by ID
|
||||
*/
|
||||
async getEnvelope(envelopeId: string) {
|
||||
return await prisma.quantumEnvelope.findUnique({
|
||||
return await prisma.quantum_envelopes.findUnique({
|
||||
where: { envelopeId },
|
||||
});
|
||||
}
|
||||
@@ -191,7 +194,7 @@ export class QuantumEnvelopeService {
|
||||
* Verify envelope integrity
|
||||
*/
|
||||
async verifyEnvelopeIntegrity(envelopeId: string): Promise<boolean> {
|
||||
const envelope = await prisma.quantumEnvelope.findUnique({
|
||||
const envelope = await prisma.quantum_envelopes.findUnique({
|
||||
where: { envelopeId },
|
||||
});
|
||||
|
||||
|
||||
@@ -77,8 +77,9 @@ export class QuantumProxyService {
|
||||
);
|
||||
|
||||
// Step 5: Create proxy transaction record
|
||||
const proxyTransaction = await prisma.quantumProxyTransaction.create({
|
||||
const proxyTransaction = await prisma.quantum_proxy_transactions.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
proxyTransactionId,
|
||||
legacyTransactionId: request.legacyTransactionId,
|
||||
legacyProtocol: request.legacyProtocol,
|
||||
@@ -91,6 +92,8 @@ export class QuantumProxyService {
|
||||
currencyCode: request.currencyCode,
|
||||
status: 'bridged',
|
||||
bridgedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -113,8 +116,9 @@ export class QuantumProxyService {
|
||||
});
|
||||
|
||||
// Create failed transaction record
|
||||
await prisma.quantumProxyTransaction.create({
|
||||
await prisma.quantum_proxy_transactions.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
proxyTransactionId,
|
||||
legacyTransactionId: request.legacyTransactionId,
|
||||
legacyProtocol: request.legacyProtocol,
|
||||
@@ -123,6 +127,8 @@ export class QuantumProxyService {
|
||||
amount: new Decimal(request.amount),
|
||||
currencyCode: request.currencyCode,
|
||||
status: 'failed',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -178,11 +184,11 @@ export class QuantumProxyService {
|
||||
* Get proxy transaction by ID
|
||||
*/
|
||||
async getProxyTransaction(proxyTransactionId: string) {
|
||||
return await prisma.quantumProxyTransaction.findUnique({
|
||||
return await prisma.quantum_proxy_transactions.findUnique({
|
||||
where: { proxyTransactionId },
|
||||
include: {
|
||||
envelope: true,
|
||||
translation: true,
|
||||
quantum_envelopes: true,
|
||||
quantum_translations: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -191,7 +197,7 @@ export class QuantumProxyService {
|
||||
* Get proxy transactions for a bank
|
||||
*/
|
||||
async getProxyTransactionsForBank(sovereignBankId: string) {
|
||||
return await prisma.quantumProxyTransaction.findMany({
|
||||
return await prisma.quantum_proxy_transactions.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ sourceBankId: sovereignBankId },
|
||||
@@ -200,8 +206,8 @@ export class QuantumProxyService {
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
include: {
|
||||
envelope: true,
|
||||
translation: true,
|
||||
quantum_envelopes: true,
|
||||
quantum_translations: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -213,7 +219,7 @@ export class QuantumProxyService {
|
||||
legacyProtocol: string,
|
||||
limit: number = 100
|
||||
) {
|
||||
return await prisma.quantumProxyTransaction.findMany({
|
||||
return await prisma.quantum_proxy_transactions.findMany({
|
||||
where: { legacyProtocol },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: limit,
|
||||
|
||||
@@ -58,8 +58,9 @@ export class QuantumTranslationService {
|
||||
const quantumCompatibleCurrency = fxTranslation.quantumCurrency;
|
||||
|
||||
// Step 5: Save translation record
|
||||
const translation = await prisma.quantumTranslation.create({
|
||||
const translation = await prisma.quantum_translations.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
translationId,
|
||||
legacyProtocol: request.legacyProtocol,
|
||||
legacyAmount: new Decimal(request.amount),
|
||||
@@ -71,6 +72,8 @@ export class QuantumTranslationService {
|
||||
protocolMapping: protocolMapping as any,
|
||||
transactionData: request.transactionData as any,
|
||||
status: 'completed',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -95,7 +98,7 @@ export class QuantumTranslationService {
|
||||
private async getProtocolMapping(
|
||||
legacyProtocol: string
|
||||
): Promise<any> {
|
||||
const mapping = await prisma.legacyProtocolMapping.findFirst({
|
||||
const mapping = await prisma.legacy_protocol_mappings.findFirst({
|
||||
where: { legacyProtocol, status: 'active' },
|
||||
});
|
||||
|
||||
@@ -261,7 +264,7 @@ export class QuantumTranslationService {
|
||||
* Get translation by ID
|
||||
*/
|
||||
async getTranslation(translationId: string) {
|
||||
return await prisma.quantumTranslation.findUnique({
|
||||
return await prisma.quantum_translations.findUnique({
|
||||
where: { translationId },
|
||||
});
|
||||
}
|
||||
@@ -273,7 +276,7 @@ export class QuantumTranslationService {
|
||||
legacyProtocol: string,
|
||||
limit: number = 100
|
||||
) {
|
||||
return await prisma.quantumTranslation.findMany({
|
||||
return await prisma.quantum_translations.findMany({
|
||||
where: { legacyProtocol },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: limit,
|
||||
|
||||
@@ -36,11 +36,11 @@ export class QuantumCryptoService {
|
||||
pqcKeyId: string
|
||||
): Promise<HybridSignature> {
|
||||
// Get keys
|
||||
const eccKey = await prisma.cryptographicKey.findUnique({
|
||||
const eccKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: eccKeyId },
|
||||
});
|
||||
|
||||
const pqcKey = await prisma.cryptographicKey.findUnique({
|
||||
const pqcKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: pqcKeyId },
|
||||
});
|
||||
|
||||
@@ -72,11 +72,11 @@ export class QuantumCryptoService {
|
||||
eccKeyId: string,
|
||||
pqcKeyId: string
|
||||
): Promise<boolean> {
|
||||
const eccKey = await prisma.cryptographicKey.findUnique({
|
||||
const eccKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: eccKeyId },
|
||||
});
|
||||
|
||||
const pqcKey = await prisma.cryptographicKey.findUnique({
|
||||
const pqcKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: pqcKeyId },
|
||||
});
|
||||
|
||||
@@ -100,7 +100,7 @@ export class QuantumCryptoService {
|
||||
const publicKey = `ecc_521_public_${keyId}`;
|
||||
const privateKeyRef = `hsm_ecc_521_${keyId}`;
|
||||
|
||||
await prisma.cryptographicKey.create({
|
||||
await prisma.cryptographic_keys.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
keyId,
|
||||
@@ -130,7 +130,7 @@ export class QuantumCryptoService {
|
||||
|
||||
const keyType = algorithm === 'CRYSTALS-Kyber' ? 'pqc_kyber' : 'pqc_dilithium';
|
||||
|
||||
await prisma.cryptographicKey.create({
|
||||
await prisma.cryptographic_keys.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
keyId,
|
||||
@@ -171,7 +171,7 @@ export class QuantumCryptoService {
|
||||
* Rotate key
|
||||
*/
|
||||
async rotateKey(oldKeyId: string, newKeyPurpose?: string): Promise<string> {
|
||||
const oldKey = await prisma.cryptographicKey.findUnique({
|
||||
const oldKey = await prisma.cryptographic_keys.findUnique({
|
||||
where: { keyId: oldKeyId },
|
||||
});
|
||||
|
||||
@@ -180,7 +180,7 @@ export class QuantumCryptoService {
|
||||
}
|
||||
|
||||
// Mark old key as rotated
|
||||
await prisma.cryptographicKey.update({
|
||||
await prisma.cryptographic_keys.update({
|
||||
where: { keyId: oldKeyId },
|
||||
data: {
|
||||
status: 'rotated',
|
||||
|
||||
@@ -25,7 +25,7 @@ export class SciReplicationService {
|
||||
const replicationId = `REP-${uuidv4()}`;
|
||||
|
||||
// Create replication record
|
||||
await prisma.sovereignReplication.create({
|
||||
await prisma.sovereign_replications.create({
|
||||
data: {
|
||||
replicationId,
|
||||
zoneId: request.zoneId,
|
||||
@@ -70,7 +70,7 @@ export class SciReplicationService {
|
||||
*/
|
||||
private async updateMetadataHash(replicationId: string, zoneId: string): Promise<void> {
|
||||
// Get zone metadata
|
||||
const zone = await prisma.sovereignComputeZone.findUnique({
|
||||
const zone = await prisma.sovereign_compute_zones.findUnique({
|
||||
where: { zoneId },
|
||||
include: {
|
||||
contracts: true,
|
||||
@@ -98,7 +98,7 @@ export class SciReplicationService {
|
||||
const metadataHash = createHash('sha256').update(metadataString).digest('hex');
|
||||
|
||||
// Update replication record
|
||||
await prisma.sovereignReplication.update({
|
||||
await prisma.sovereign_replications.update({
|
||||
where: { replicationId },
|
||||
data: {
|
||||
metadataHash,
|
||||
@@ -121,7 +121,7 @@ export class SciReplicationService {
|
||||
}
|
||||
|
||||
// Update replication status
|
||||
await prisma.sovereignReplication.update({
|
||||
await prisma.sovereign_replications.update({
|
||||
where: { replicationId },
|
||||
data: { status: 'paused' },
|
||||
});
|
||||
@@ -133,7 +133,7 @@ export class SciReplicationService {
|
||||
* Get replication status
|
||||
*/
|
||||
async getReplicationStatus(replicationId: string) {
|
||||
const replication = await prisma.sovereignReplication.findUnique({
|
||||
const replication = await prisma.sovereign_replications.findUnique({
|
||||
where: { replicationId },
|
||||
include: {
|
||||
zone: true,
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
// TPM-backed nodes, PQ-encrypted channels (Kyber), cross-zone firewalls, integrity attestations
|
||||
|
||||
import prisma from '@/shared/database/prisma';
|
||||
import { Prisma } from '@prisma/client';
|
||||
import { createHash } from 'crypto';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { logger } from '@/infrastructure/monitoring/logger';
|
||||
@@ -31,15 +32,18 @@ export class SciSecurityService {
|
||||
const integrityHash = this.calculateIntegrityHash(request.tpmData);
|
||||
|
||||
// Create attestation
|
||||
await prisma.sovereignAttestation.create({
|
||||
await prisma.sovereign_attestations.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
attestationId,
|
||||
zoneId: request.zoneId,
|
||||
attestationType: 'tpm_integrity',
|
||||
attestationData: request.tpmData as unknown as Record<string, unknown>,
|
||||
attestationData: request.tpmData as Prisma.InputJsonValue,
|
||||
integrityHash,
|
||||
status: 'verified',
|
||||
verifiedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -69,8 +73,9 @@ export class SciSecurityService {
|
||||
const encryptionKey = keyPair.publicKey;
|
||||
|
||||
// Create attestation for PQ encryption
|
||||
await prisma.sovereignAttestation.create({
|
||||
await prisma.sovereign_attestations.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
attestationId: `ATTEST-${uuidv4()}`,
|
||||
zoneId: sourceZoneId,
|
||||
attestationType: 'pq_encryption',
|
||||
@@ -79,10 +84,12 @@ export class SciSecurityService {
|
||||
targetZoneId,
|
||||
encryptionKey,
|
||||
algorithm: 'kyber',
|
||||
} as unknown as Record<string, unknown>,
|
||||
} as Prisma.InputJsonValue,
|
||||
integrityHash: createHash('sha256').update(encryptionKey).digest('hex'),
|
||||
status: 'verified',
|
||||
verifiedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -99,7 +106,7 @@ export class SciSecurityService {
|
||||
* Verify cross-zone firewall
|
||||
*/
|
||||
async verifyCrossZoneFirewall(zoneId: string): Promise<boolean> {
|
||||
const zone = await prisma.sovereignComputeZone.findUnique({
|
||||
const zone = await prisma.sovereign_compute_zones.findUnique({
|
||||
where: { zoneId },
|
||||
});
|
||||
|
||||
@@ -116,18 +123,21 @@ export class SciSecurityService {
|
||||
}
|
||||
|
||||
// Create firewall attestation
|
||||
await prisma.sovereignAttestation.create({
|
||||
await prisma.sovereign_attestations.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
attestationId: `ATTEST-${uuidv4()}`,
|
||||
zoneId,
|
||||
attestationType: 'cross_zone_firewall',
|
||||
attestationData: {
|
||||
networkSegmentation: true,
|
||||
firewallEnabled: true,
|
||||
} as unknown as Record<string, unknown>,
|
||||
} as Prisma.InputJsonValue,
|
||||
integrityHash: createHash('sha256').update(zoneId).digest('hex'),
|
||||
status: 'verified',
|
||||
verifiedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -147,15 +157,18 @@ export class SciSecurityService {
|
||||
const integrityHash = this.calculateIntegrityHash(request.attestationData);
|
||||
|
||||
// Create attestation
|
||||
await prisma.sovereignAttestation.create({
|
||||
await prisma.sovereign_attestations.create({
|
||||
data: {
|
||||
id: uuidv4(),
|
||||
attestationId,
|
||||
zoneId: request.zoneId,
|
||||
attestationType: request.attestationType,
|
||||
attestationData: request.attestationData as unknown as Record<string, unknown>,
|
||||
attestationData: request.attestationData as Prisma.InputJsonValue,
|
||||
integrityHash,
|
||||
status: 'verified',
|
||||
verifiedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
|
||||
@@ -180,7 +193,7 @@ export class SciSecurityService {
|
||||
* Verify attestation
|
||||
*/
|
||||
async verifyAttestation(attestationId: string): Promise<boolean> {
|
||||
const attestation = await prisma.sovereignAttestation.findUnique({
|
||||
const attestation = await prisma.sovereign_attestations.findUnique({
|
||||
where: { attestationId },
|
||||
});
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ export class SciSevmService {
|
||||
const contractHash = createHash('sha256').update(request.contractCode).digest('hex');
|
||||
|
||||
// Deploy contract
|
||||
await prisma.sevmContract.create({
|
||||
await prisma.sevm_contracts.create({
|
||||
data: {
|
||||
contractId,
|
||||
zoneId: request.zoneId,
|
||||
@@ -64,7 +64,7 @@ export class SciSevmService {
|
||||
const executionId = `SEVM-EXEC-${uuidv4()}`;
|
||||
|
||||
// Get contract
|
||||
const contract = await prisma.sevmContract.findUnique({
|
||||
const contract = await prisma.sevm_contracts.findUnique({
|
||||
where: { contractId: request.contractId },
|
||||
});
|
||||
|
||||
@@ -114,7 +114,7 @@ export class SciSevmService {
|
||||
* Verify execution permission
|
||||
*/
|
||||
private async verifyExecutionPermission(contractId: string, callerBankId: string): Promise<boolean> {
|
||||
const contract = await prisma.sevmContract.findUnique({
|
||||
const contract = await prisma.sevm_contracts.findUnique({
|
||||
where: { contractId },
|
||||
});
|
||||
|
||||
@@ -153,7 +153,7 @@ export class SciSevmService {
|
||||
* Get contract by address
|
||||
*/
|
||||
async getContractByAddress(contractAddress: string) {
|
||||
return await prisma.sevmContract.findUnique({
|
||||
return await prisma.sevm_contracts.findUnique({
|
||||
where: { contractAddress },
|
||||
});
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ export class SciZoneManagerService {
|
||||
};
|
||||
|
||||
// Create zone
|
||||
await prisma.sovereignComputeZone.create({
|
||||
await prisma.sovereign_compute_zones.create({
|
||||
data: {
|
||||
zoneId,
|
||||
sovereignBankId: request.sovereignBankId,
|
||||
@@ -61,7 +61,7 @@ export class SciZoneManagerService {
|
||||
* Configure PQ-HSM for zone
|
||||
*/
|
||||
async configurePqHsm(zoneId: string, hsmConfig: Record<string, unknown>): Promise<void> {
|
||||
await prisma.sovereignComputeZone.update({
|
||||
await prisma.sovereign_compute_zones.update({
|
||||
where: { zoneId },
|
||||
data: {
|
||||
pqHsmConfig: hsmConfig,
|
||||
@@ -75,7 +75,7 @@ export class SciZoneManagerService {
|
||||
* Enforce zero-trust isolation
|
||||
*/
|
||||
async enforceIsolation(zoneId: string): Promise<boolean> {
|
||||
const zone = await prisma.sovereignComputeZone.findUnique({
|
||||
const zone = await prisma.sovereign_compute_zones.findUnique({
|
||||
where: { zoneId },
|
||||
});
|
||||
|
||||
@@ -107,7 +107,7 @@ export class SciZoneManagerService {
|
||||
* Get zone by sovereign bank
|
||||
*/
|
||||
async getZoneBySovereign(sovereignBankId: string): Promise<{ zoneId: string } | null> {
|
||||
const zone = await prisma.sovereignComputeZone.findFirst({
|
||||
const zone = await prisma.sovereign_compute_zones.findFirst({
|
||||
where: {
|
||||
sovereignBankId,
|
||||
status: 'active',
|
||||
|
||||
Reference in New Issue
Block a user