chore: sync submodule state (parent ref update)

Made-with: Cursor
This commit is contained in:
defiQUG
2026-03-02 12:14:07 -08:00
parent 6c4555cebd
commit 89b82cdadb
883 changed files with 78752 additions and 18180 deletions

View File

@@ -0,0 +1,66 @@
-- Audit Bank Account Balances
-- Run this BEFORE applying balance constraints
-- Fix any inconsistencies found before running 004_balance_constraints.sql

-- 1) Accounts whose available balance has gone negative (worst first)
SELECT
    id,
    account_number,
    currency_code,
    balance,
    available_balance,
    reserved_balance,
    'negative_available' AS issue_type
FROM bank_accounts
WHERE available_balance < 0
ORDER BY available_balance ASC;

-- 2) Accounts whose reserved balance has gone negative (worst first)
SELECT
    id,
    account_number,
    currency_code,
    balance,
    available_balance,
    reserved_balance,
    'negative_reserved' AS issue_type
FROM bank_accounts
WHERE reserved_balance < 0
ORDER BY reserved_balance ASC;

-- 3) Accounts where available alone exceeds the total balance (largest excess first)
SELECT
    id,
    account_number,
    currency_code,
    balance,
    available_balance,
    reserved_balance,
    available_balance - balance AS excess,
    'available_exceeds_balance' AS issue_type
FROM bank_accounts
WHERE available_balance > balance
ORDER BY (available_balance - balance) DESC;

-- 4) Accounts where available + reserved exceeds the total balance (largest excess first)
SELECT
    id,
    account_number,
    currency_code,
    balance,
    available_balance,
    reserved_balance,
    (available_balance + reserved_balance) - balance AS excess,
    'total_exceeds_balance' AS issue_type
FROM bank_accounts
WHERE (available_balance + reserved_balance) > balance
ORDER BY ((available_balance + reserved_balance) - balance) DESC;

-- 5) One-row summary: how many accounts hit each issue (PostgreSQL FILTER syntax)
SELECT
    COUNT(*) FILTER (WHERE available_balance < 0) AS negative_available_count,
    COUNT(*) FILTER (WHERE reserved_balance < 0) AS negative_reserved_count,
    COUNT(*) FILTER (WHERE available_balance > balance) AS available_exceeds_balance_count,
    COUNT(*) FILTER (WHERE (available_balance + reserved_balance) > balance) AS total_exceeds_balance_count,
    COUNT(*) AS total_accounts
FROM bank_accounts;

73
scripts/check-as4-status.sh Executable file
View File

@@ -0,0 +1,73 @@
#!/bin/bash
# Check AS4 Settlement System Status
# Comprehensive status check: server health, AS4 metrics, database tables,
# Redis, certificates, and route registration.
#
# pipefail is required so that `curl | jq || echo` actually reports failures:
# without it the pipeline's status is jq's, and `jq '.'` exits 0 on the empty
# input a failed curl produces, so the "not responding" branch was dead code.
set -eo pipefail

BASE_URL="${AS4_BASE_URL:-http://localhost:3000}"

echo "========================================="
echo "AS4 Settlement System Status"
echo "========================================="
echo ""

# Check server health. -f makes curl exit non-zero on HTTP 4xx/5xx, so an
# error response is also treated as a failure (plain -s would exit 0).
echo "1. Server Health:"
curl -sf "$BASE_URL/health" | jq '.' || echo " ✗ Server not responding"
echo ""

# Check AS4 metrics endpoint
echo "2. AS4 Metrics:"
curl -sf "$BASE_URL/api/v1/as4/metrics/health" | jq '.' || echo " ✗ Metrics not available"
echo ""

# Check database tables (only if psql and DATABASE_URL are both available)
echo "3. Database Tables:"
if command -v psql &> /dev/null && [ -n "$DATABASE_URL" ]; then
    psql "$DATABASE_URL" -c "
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name LIKE 'as4_%'
ORDER BY table_name;
" 2>/dev/null || echo " ⚠ Database not accessible"
else
    echo " ⚠ PostgreSQL not available"
fi
echo ""

# Check Redis with a ping
echo "4. Redis Status:"
if command -v redis-cli &> /dev/null; then
    if redis-cli ping &> /dev/null; then
        echo " ✓ Redis is running"
    else
        echo " ✗ Redis is not responding"
    fi
else
    echo " ⚠ Redis CLI not available"
fi
echo ""

# Check TLS certificate presence (print subject when readable; never fatal)
echo "5. Certificates:"
if [ -f "certs/as4/as4-tls-cert.pem" ]; then
    echo " ✓ TLS certificate exists"
    openssl x509 -noout -subject -in certs/as4/as4-tls-cert.pem 2>/dev/null || true
else
    echo " ⚠ TLS certificate not found"
fi
echo ""

# Check that the AS4 routes are wired into the API gateway source
echo "6. Route Registration:"
if grep -q "as4GatewayRoutes" src/integration/api-gateway/app.ts 2>/dev/null; then
    echo " ✓ Routes registered in app.ts"
else
    echo " ✗ Routes not registered"
fi
echo ""

echo "========================================="
echo "Status Check Complete"
echo "========================================="

141
scripts/check-database-status.sh Executable file
View File

@@ -0,0 +1,141 @@
#!/bin/bash
# Check Database Status for AS4 Settlement
# Verifies database connectivity and readiness: client tooling, DATABASE_URL,
# connectivity, schema, Prisma migration state, AS4 table presence, and the
# locally generated Prisma client.
set -e

echo "========================================="
echo "AS4 Settlement Database Status Check"
echo "========================================="
echo ""

cd "$(dirname "$0")/.."

# Load environment variables from .env. `set -a` auto-exports everything the
# file assigns; unlike `export $(grep -v '^#' .env | xargs)` this handles
# quoted values and values containing spaces correctly.
if [ -f .env ]; then
    set -a
    # shellcheck disable=SC1091
    . ./.env
    set +a
else
    echo "⚠ Warning: .env file not found"
    echo ""
fi

# 1. The psql client is required for every later check.
echo "1. Checking PostgreSQL client..."
if command -v psql &> /dev/null; then
    PSQL_VERSION=$(psql --version | head -1)
    echo " ✓ PostgreSQL client installed: $PSQL_VERSION"
else
    echo " ✗ PostgreSQL client not found"
    exit 1
fi
echo ""

# 2. DATABASE_URL must be set (from the environment or .env).
echo "2. Checking DATABASE_URL..."
if [ -z "$DATABASE_URL" ]; then
    echo " ✗ DATABASE_URL not set"
    echo ""
    echo " Please set DATABASE_URL in .env file"
    echo " Example: DATABASE_URL=postgresql://user:password@host:port/database"
    exit 1
else
    # Mask the password portion before echoing the URL.
    MASKED_URL=$(echo "$DATABASE_URL" | sed 's/:\/\/[^:]*:[^@]*@/:\/\/***:***@/')
    echo " ✓ DATABASE_URL is set: $MASKED_URL"
fi
echo ""

# 3. Test connectivity. The version query runs once and its output is reused,
# instead of connecting twice (separate probe + fetch).
echo "3. Testing database connection..."
PG_VERSION=$(timeout 5 psql "$DATABASE_URL" -c "SELECT version();" -t -A 2>/dev/null | head -1) || true
if [ -n "$PG_VERSION" ]; then
    echo " ✓ Database connection successful"
    echo " PostgreSQL version: $PG_VERSION"
else
    echo " ✗ Database connection failed"
    echo ""
    echo " Possible issues:"
    echo " - Database server not running"
    echo " - Network connectivity issues"
    echo " - Incorrect credentials"
    echo " - Database does not exist"
    exit 1
fi
echo ""

# 4. The public schema should exist on any healthy PostgreSQL database.
echo "4. Checking database schema..."
if timeout 5 psql "$DATABASE_URL" -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'public';" &> /dev/null; then
    echo " ✓ Public schema exists"
else
    echo " ✗ Public schema not found"
    exit 1
fi
echo ""

# 5. Prisma migrations table (absent on a brand-new database — not fatal).
echo "5. Checking Prisma migrations..."
if timeout 5 psql "$DATABASE_URL" -c "SELECT COUNT(*) FROM _prisma_migrations;" &> /dev/null; then
    MIGRATION_COUNT=$(timeout 5 psql "$DATABASE_URL" -c "SELECT COUNT(*) FROM _prisma_migrations;" -t -A 2>/dev/null | tr -d ' ')
    echo " ✓ Prisma migrations table exists"
    echo " Migration count: $MIGRATION_COUNT"
    # Show the five most recent migrations.
    echo ""
    echo " Recent migrations:"
    # read -r: keep any backslashes in migration names intact.
    timeout 5 psql "$DATABASE_URL" -c "SELECT migration_name, finished_at FROM _prisma_migrations ORDER BY finished_at DESC LIMIT 5;" -t -A 2>/dev/null | while read -r line; do
        if [ -n "$line" ]; then
            echo " - $line"
        fi
    done
else
    echo " ⚠ Prisma migrations table not found (database may be new)"
fi
echo ""

# 6. Count AS4-specific tables; zero means the migration has not been applied.
echo "6. Checking AS4 tables..."
AS4_TABLES=$(timeout 5 psql "$DATABASE_URL" -c "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name LIKE 'as4_%' ORDER BY table_name;" -t -A 2>/dev/null | grep -v '^$' | wc -l | tr -d ' ')
if [ "$AS4_TABLES" -gt 0 ]; then
    echo " ✓ Found $AS4_TABLES AS4 table(s)"
    echo ""
    echo " AS4 tables:"
    timeout 5 psql "$DATABASE_URL" -c "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name LIKE 'as4_%' ORDER BY table_name;" -t -A 2>/dev/null | while read -r line; do
        if [ -n "$line" ]; then
            echo " - $line"
        fi
    done
else
    echo " ⚠ No AS4 tables found (migration not yet applied)"
fi
echo ""

# 7. Prisma client generation is a local artifact check, not a DB check.
echo "7. Checking Prisma client..."
if [ -f "node_modules/.prisma/client/index.js" ]; then
    echo " ✓ Prisma client generated"
else
    echo " ⚠ Prisma client not generated - run: npx prisma generate"
fi
echo ""

# Summary (only reached when connection, schema, and counts all succeeded)
echo "========================================="
echo "Database Status Summary"
echo "========================================="
echo "✓ Database connection: OK"
echo "✓ PostgreSQL version: $PG_VERSION"
if [ "$AS4_TABLES" -gt 0 ]; then
    echo "✓ AS4 tables: Found ($AS4_TABLES tables)"
    echo ""
    echo "Status: ✅ Database is ready and AS4 tables exist"
else
    echo "⚠ AS4 tables: Not found"
    echo ""
    echo "Status: ✅ Database is ready (migration needed)"
    echo ""
    echo "Next step: Run migration"
    echo " npx prisma migrate deploy"
fi
echo ""

View File

@@ -0,0 +1,121 @@
#!/bin/bash
# Complete Chart of Accounts Setup
# This script attempts to:
# 1. Grant database permissions (if on Proxmox host)
# 2. Run migration
# 3. Initialize accounts
# 4. Verify setup
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"

echo "=========================================="
echo "Complete Chart of Accounts Setup"
echo "=========================================="
echo ""

# Configuration — env-overridable defaults consumed by the helper scripts.
VMID="${VMID:-10100}"
DB_HOST="${DB_HOST:-192.168.11.105}"
DB_NAME="${DB_NAME:-dbis_core}"
DB_USER="${DB_USER:-dbis}"

# Step 1: Grant Database Permissions
echo "Step 1: Granting Database Permissions..."
echo ""
if command -v pct &> /dev/null; then
    echo "✅ Running on Proxmox host - granting permissions..."
    # With `set -e`, `cmd; STATUS=$?` exits before $? can be read, so the
    # failure has to be caught directly in an `if !` condition.
    if ! "$SCRIPT_DIR/grant-database-permissions.sh"; then
        echo "❌ Failed to grant permissions"
        exit 1
    fi
else
    echo "⚠️ Not on Proxmox host - skipping permission grant"
    echo " Permissions must be granted manually on Proxmox host:"
    echo " ssh root@192.168.11.10"
    echo " cd /root/proxmox/dbis_core"
    echo " ./scripts/grant-database-permissions.sh"
    echo ""
    read -p "Have permissions been granted? (y/n): " PERMISSIONS_GRANTED_INPUT
    if [[ "$PERMISSIONS_GRANTED_INPUT" != "y" && "$PERMISSIONS_GRANTED_INPUT" != "Y" ]]; then
        echo "❌ Please grant permissions first, then run this script again"
        exit 1
    fi
fi
echo ""

# Step 2: Run the migration (same `if !` pattern — the old `STATUS=$?` check
# was unreachable under set -e).
echo "Step 2: Running Migration..."
echo ""
if ! "$SCRIPT_DIR/run-chart-of-accounts-migration.sh"; then
    echo "❌ Migration failed"
    exit 1
fi
echo ""

echo "Step 3: Verifying Setup..."
echo ""
# Load DATABASE_URL from .env when it is not already in the environment.
if [ -z "$DATABASE_URL" ]; then
    if [ -f .env ]; then
        export $(grep -v '^#' .env | xargs)
    fi
fi

# Verify accounts were created
if command -v psql &> /dev/null && [ -n "$DATABASE_URL" ]; then
    echo "Checking account count..."
    ACCOUNT_COUNT=$(psql "$DATABASE_URL" -t -c "SELECT COUNT(*) FROM chart_of_accounts;" 2>/dev/null | xargs)
    # Guard against non-numeric output (e.g. empty string on query failure)
    # before the arithmetic comparison.
    if [[ "$ACCOUNT_COUNT" =~ ^[0-9]+$ ]] && [ "$ACCOUNT_COUNT" -gt 0 ]; then
        echo "✅ Found $ACCOUNT_COUNT accounts in database"
        # Show summary by category
        echo ""
        echo "Account Summary:"
        psql "$DATABASE_URL" -c "
SELECT
category,
COUNT(*) as count
FROM chart_of_accounts
WHERE is_active = true
GROUP BY category
ORDER BY category;
" 2>/dev/null || true
    else
        echo "⚠️ Could not verify account count (this is okay if psql is not available)"
    fi
else
    echo "⚠️ psql not available or DATABASE_URL not set - skipping verification"
    echo " You can verify manually:"
    echo " psql \"$DATABASE_URL\" -c \"SELECT COUNT(*) FROM chart_of_accounts;\""
fi
echo ""

echo "=========================================="
echo "✅ Chart of Accounts Setup Complete!"
echo "=========================================="
echo ""
echo "Next steps:"
echo "1. Test API endpoints:"
echo " curl http://localhost:3000/api/accounting/chart-of-accounts"
echo ""
echo "2. View accounts by category:"
echo " curl http://localhost:3000/api/accounting/chart-of-accounts/category/ASSET"
echo ""

32
scripts/create-test-member.sh Executable file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
# Create Test Member for AS4 Settlement
# Creates a test member via the directory API.
# Usage: create-test-member.sh [MEMBER_ID] [ORG_NAME]
#
# pipefail + curl --fail: a POST rejected by the server (HTTP 4xx/5xx) now
# aborts the script instead of falling through to the success message.
set -eo pipefail

BASE_URL="${AS4_BASE_URL:-http://localhost:3000}"
AUTH_TOKEN="${AS4_AUTH_TOKEN:-}"
MEMBER_ID="${1:-TEST-MEMBER-$(date +%s)}"
ORG_NAME="${2:-Test Bank}"

echo "Creating test member: $MEMBER_ID"

# Random SHA-256-style fingerprint: 64 hex chars split into colon-separated
# byte pairs, upper-cased.
FINGERPRINT=$(openssl rand -hex 32 | sed 's/\(..\)/\1:/g; s/:$//' | tr '[:lower:]' '[:upper:]')

curl --fail -X POST "$BASE_URL/api/v1/as4/directory/members" \
-H "Content-Type: application/json" \
${AUTH_TOKEN:+-H "Authorization: Bearer $AUTH_TOKEN"} \
-d "{
\"memberId\": \"$MEMBER_ID\",
\"organizationName\": \"$ORG_NAME\",
\"as4EndpointUrl\": \"https://test-bank.example.com/as4\",
\"tlsCertFingerprint\": \"$FINGERPRINT\",
\"allowedMessageTypes\": [\"DBIS.SI.202\", \"DBIS.SI.202COV\"],
\"routingGroups\": [\"DEFAULT\"],
\"capacityTier\": 3
}" | jq '.'

echo ""
echo "Test member created: $MEMBER_ID"

View File

@@ -0,0 +1,79 @@
#!/bin/bash
# AS4 Settlement Deployment Script
# Generates the Prisma client, applies migrations, seeds the marketplace
# offering, and verifies TypeScript compilation, lint, and route registration.
set -e

echo "========================================="
echo "AS4 Settlement Deployment Script"
echo "========================================="

cd "$(dirname "$0")/.."

# Step 1: Generate Prisma Client (fatal on failure via set -e)
echo ""
echo "Step 1: Generating Prisma Client..."
npx prisma generate

# Step 2: Run Database Migration (non-fatal: database may be offline)
echo ""
echo "Step 2: Running database migration..."
if npx prisma migrate deploy; then
    echo "✓ Migration successful"
else
    echo "⚠ Migration failed - database may not be available"
    echo " Run manually when database is available:"
    echo " npx prisma migrate deploy"
fi

# Step 3: Seed Marketplace Offering (non-fatal for the same reason)
echo ""
echo "Step 3: Seeding marketplace offering..."
if npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts; then
    echo "✓ Marketplace offering seeded"
else
    echo "⚠ Seeding failed - database may not be available"
    echo " Run manually when database is available:"
    echo " npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts"
fi

# Step 4: Verify TypeScript Compilation (fatal)
echo ""
echo "Step 4: Verifying TypeScript compilation..."
if npx tsc --noEmit; then
    echo "✓ TypeScript compilation successful"
else
    echo "✗ TypeScript compilation failed"
    exit 1
fi

# Step 5: Run Linter (non-blocking).
# BUGFIX: the old condition (`npm run lint 2>&1 | grep -q "error" || [ $? -eq 0 ]`)
# was inverted — finding the word "error" in the output made it report success.
# Use the linter's own exit code instead.
echo ""
echo "Step 5: Running linter..."
if npm run lint; then
    echo "✓ Linter check completed"
else
    echo "⚠ Linter found issues (non-blocking)"
fi

# Step 6: Verify Routes (fatal: deployment is useless without them)
echo ""
echo "Step 6: Verifying route registration..."
if grep -q "as4GatewayRoutes" src/integration/api-gateway/app.ts; then
    echo "✓ AS4 routes registered"
else
    echo "✗ AS4 routes not found in app.ts"
    exit 1
fi

echo ""
echo "========================================="
echo "Deployment verification complete!"
echo "========================================="
echo ""
echo "Next steps:"
echo "1. Ensure database is running and accessible"
echo "2. Run migration: npx prisma migrate deploy"
echo "3. Seed marketplace: npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts"
echo "4. Start server: npm run dev"
echo "5. Test endpoints: curl http://localhost:3000/health"
echo ""

50
scripts/fix-database-url.sh Executable file
View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Fix DATABASE_URL in .env file
# Replaces a placeholder DATABASE_URL with a real connection string built from
# interactively entered credentials. A timestamped backup of .env is kept.
ENV_FILE=".env"
BACKUP_FILE=".env.backup.$(date +%Y%m%d_%H%M%S)"

if [ ! -f "$ENV_FILE" ]; then
    echo "❌ .env file not found"
    exit 1
fi

# Backup original
cp "$ENV_FILE" "$BACKUP_FILE"
echo "✅ Backed up .env to $BACKUP_FILE"

# Only rewrite when the file still contains the documented placeholder.
if grep -q "DATABASE_URL=postgresql://user:password@host:port/database" "$ENV_FILE"; then
    echo "⚠️ Found placeholder DATABASE_URL. Please provide the correct connection string."
    echo ""
    echo "Expected format: postgresql://user:password@host:port/database"
    echo ""
    read -p "Enter database host [192.168.11.100]: " DB_HOST
    DB_HOST=${DB_HOST:-192.168.11.100}
    read -p "Enter database port [5432]: " DB_PORT
    DB_PORT=${DB_PORT:-5432}
    read -p "Enter database name [dbis_core]: " DB_NAME
    DB_NAME=${DB_NAME:-dbis_core}
    read -p "Enter database user [dbis]: " DB_USER
    DB_USER=${DB_USER:-dbis}
    read -sp "Enter database password: " DB_PASS
    echo ""
    # URL-encode the password.
    # BUGFIX: '%' must be encoded FIRST, otherwise the '%' characters that the
    # later substitutions introduce would themselves be re-encoded/corrupted.
    # printf (not echo) so a password starting with '-' or containing
    # backslashes survives intact.
    DB_PASS_ENCODED=$(printf '%s' "$DB_PASS" | sed 's/%/%25/g; s/:/%3A/g; s/@/%40/g; s/#/%23/g; s/\//%2F/g; s/\?/%3F/g; s/&/%26/g; s/=/%3D/g')
    NEW_URL="postgresql://${DB_USER}:${DB_PASS_ENCODED}@${DB_HOST}:${DB_PORT}/${DB_NAME}"
    # Replace the whole DATABASE_URL line ('|' delimiter because URLs contain '/').
    sed -i "s|DATABASE_URL=.*|DATABASE_URL=${NEW_URL}|" "$ENV_FILE"
    echo "✅ DATABASE_URL updated in .env file"
    echo " Connection: postgresql://${DB_USER}:***@${DB_HOST}:${DB_PORT}/${DB_NAME}"
else
    echo "✅ DATABASE_URL appears to be set (not a placeholder)"
    echo " Current value: $(grep '^DATABASE_URL' "$ENV_FILE" | sed 's/:[^:@]*@/:***@/g')"
fi

127
scripts/fix-docker-database.sh Executable file
View File

@@ -0,0 +1,127 @@
#!/bin/bash
# Fix Docker Database Configuration
# Ensures database and user are properly configured
#
# Flow: start the dockerized PostgreSQL if needed, wait until it answers,
# ensure the dbis_core database and dbis_user credentials exist, verify
# connectivity, then point .env's DATABASE_URL at the Docker instance
# (backing up .env first). Intermediate psql steps are best-effort (`|| true`)
# because dbis_user may already be fully configured by docker-compose.
set -e
echo "========================================="
echo "Fixing Docker Database Configuration"
echo "========================================="
echo ""
cd "$(dirname "$0")/.."
# Check if Docker Compose services are running
echo "Step 1: Checking Docker services..."
if ! docker compose -f docker/docker-compose.as4.yml ps postgres | grep -q "Up"; then
echo " Starting PostgreSQL service..."
cd docker
docker compose -f docker-compose.as4.yml up -d postgres
cd ..
# Fixed grace period; the readiness poll in Step 2 does the real waiting.
sleep 5
else
echo " ✓ PostgreSQL service is running"
fi
echo ""
# Wait for PostgreSQL to be ready
# Polls pg_isready once per second, up to 30 attempts, then gives up.
echo "Step 2: Waiting for PostgreSQL to be ready..."
for i in {1..30}; do
if docker compose -f docker/docker-compose.as4.yml exec -T postgres pg_isready -U postgres &> /dev/null; then
echo " ✓ PostgreSQL is ready"
break
fi
if [ $i -eq 30 ]; then
echo " ✗ PostgreSQL failed to start"
exit 1
fi
sleep 1
done
echo ""
# Note: Docker Compose uses POSTGRES_USER which creates a superuser with that name
# So dbis_user is already the superuser, we just need to ensure database exists
# Check if database exists
# NOTE(review): `psql -U dbis_user` with no -d connects to a database named
# after the user; this assumes docker-compose creates such a default DB —
# confirm against docker-compose.as4.yml.
echo "Step 3: Ensuring database exists..."
DB_EXISTS=$(docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -tAc "SELECT 1 FROM pg_database WHERE datname='dbis_core';" 2>/dev/null || echo "0")
if [ "$DB_EXISTS" != "1" ]; then
echo " Creating database 'dbis_core'..."
# `|| true` keeps set -e from aborting if the DB already exists/races.
docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -c "CREATE DATABASE dbis_core;" 2>&1 || true
echo " ✓ Database created"
else
echo " ✓ Database already exists"
fi
echo ""
# Update password if needed (ensure it matches what's in docker-compose)
echo "Step 4: Ensuring user password is set..."
docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -c "ALTER USER dbis_user WITH PASSWORD 'dbis_password';" 2>&1 || true
echo " ✓ Password configured"
echo ""
# Grant privileges
# (Largely redundant when dbis_user is the compose-created superuser, but
# harmless and makes the required grants explicit.)
echo "Step 5: Granting privileges..."
docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -d dbis_core -c "GRANT ALL PRIVILEGES ON DATABASE dbis_core TO dbis_user;" 2>&1 || true
docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -c "ALTER USER dbis_user CREATEDB;" 2>&1 || true
echo " ✓ Privileges granted"
echo ""
# Test connection
# This step IS fatal: if dbis_user cannot reach dbis_core now, nothing
# downstream (migrations, seeding) will work.
echo "Step 6: Testing connection..."
if docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -d dbis_core -c "SELECT version();" &> /dev/null; then
PG_VERSION=$(docker compose -f docker/docker-compose.as4.yml exec -T postgres psql -U dbis_user -d dbis_core -c "SELECT version();" -t -A 2>/dev/null | head -1)
echo " ✓ Connection successful"
echo " PostgreSQL version: $PG_VERSION"
else
echo " ✗ Connection failed"
exit 1
fi
echo ""
# Update .env if needed
echo "Step 7: Updating .env file..."
if [ -f .env ]; then
# Update DATABASE_URL to use localhost Docker
if ! grep -q "localhost:5432/dbis_core" .env || ! grep -q "dbis_user:dbis_password" .env; then
echo " Updating DATABASE_URL in .env..."
# Backup .env
cp .env .env.backup.$(date +%Y%m%d_%H%M%S)
# Update or add DATABASE_URL
if grep -q "^DATABASE_URL=" .env; then
sed -i 's|^DATABASE_URL=.*|DATABASE_URL=postgresql://dbis_user:dbis_password@localhost:5432/dbis_core|' .env
else
echo "DATABASE_URL=postgresql://dbis_user:dbis_password@localhost:5432/dbis_core" >> .env
fi
echo " ✓ .env updated"
else
echo " ✓ DATABASE_URL already configured correctly"
fi
else
echo " Creating .env file..."
cat > .env <<EOF
# Database (Docker Compose)
DATABASE_URL=postgresql://dbis_user:dbis_password@localhost:5432/dbis_core
# Redis (Docker Compose)
REDIS_URL=redis://localhost:6379
# AS4 Configuration
AS4_BASE_URL=http://localhost:3000
AS4_LOG_LEVEL=debug
NODE_ENV=development
EOF
echo " ✓ .env file created"
fi
echo ""
echo "========================================="
echo "Database Configuration Fixed!"
echo "========================================="
echo ""
echo "Next steps:"
echo "1. Run migration: npx prisma migrate deploy"
echo "2. Seed marketplace: npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts"
echo "3. Start server: npm run dev"
echo ""

View File

@@ -0,0 +1,92 @@
#!/bin/bash
# Generate AS4 Certificates
# Creates TLS, signing, and encryption certificates for AS4 Settlement.
# All three certs share the same generation recipe, so it lives in one helper
# instead of being repeated three times.
set -e

CERT_DIR="${AS4_CERT_DIR:-./certs/as4}"
DAYS_VALID="${AS4_CERT_DAYS:-365}"

echo "========================================="
echo "AS4 Certificate Generation"
echo "========================================="

# Create certificate directory (private: keys live here)
mkdir -p "$CERT_DIR"
chmod 700 "$CERT_DIR"

echo ""
echo "Generating certificates in: $CERT_DIR"
echo "Validity: $DAYS_VALID days"
echo ""

# generate_cert <basename> <subject>
# Creates $CERT_DIR/<basename>-key.pem (0600) and <basename>-cert.pem (0644)
# as a self-signed RSA-2048 pair valid for $DAYS_VALID days.
generate_cert() {
    openssl req -x509 -newkey rsa:2048 \
        -keyout "$CERT_DIR/$1-key.pem" \
        -out "$CERT_DIR/$1-cert.pem" \
        -days "$DAYS_VALID" -nodes \
        -subj "$2" 2>/dev/null
    chmod 600 "$CERT_DIR/$1-key.pem"
    chmod 644 "$CERT_DIR/$1-cert.pem"
}

# fingerprint <basename>
# Prints the certificate's SHA-256 fingerprint as bare hex (colons stripped).
fingerprint() {
    openssl x509 -fingerprint -sha256 -noout -in "$CERT_DIR/$1-cert.pem" | cut -d'=' -f2 | tr -d ':'
}

echo "1. Generating TLS Certificate..."
generate_cert as4-tls "/CN=as4.dbis.org/O=DBIS/C=US/ST=DC/L=Washington"
TLS_FINGERPRINT=$(fingerprint as4-tls)
echo " TLS Fingerprint: $TLS_FINGERPRINT"

echo ""
echo "2. Generating Signing Certificate..."
generate_cert as4-signing "/CN=DBIS AS4 Signing/O=DBIS/C=US/ST=DC/L=Washington"
SIGNING_FINGERPRINT=$(fingerprint as4-signing)
echo " Signing Fingerprint: $SIGNING_FINGERPRINT"

echo ""
echo "3. Generating Encryption Certificate..."
generate_cert as4-encryption "/CN=DBIS AS4 Encryption/O=DBIS/C=US/ST=DC/L=Washington"
ENCRYPTION_FINGERPRINT=$(fingerprint as4-encryption)
echo " Encryption Fingerprint: $ENCRYPTION_FINGERPRINT"

# Persist fingerprints for .env configuration / member-directory registration.
cat > "$CERT_DIR/fingerprints.txt" <<EOF
# AS4 Certificate Fingerprints
# Generated: $(date -Iseconds)
TLS_FINGERPRINT=$TLS_FINGERPRINT
SIGNING_FINGERPRINT=$SIGNING_FINGERPRINT
ENCRYPTION_FINGERPRINT=$ENCRYPTION_FINGERPRINT
EOF

echo ""
echo "========================================="
echo "Certificate Generation Complete!"
echo "========================================="
echo ""
echo "Certificates saved to: $CERT_DIR"
echo "Fingerprints saved to: $CERT_DIR/fingerprints.txt"
echo ""
echo "Next steps:"
echo "1. Update .env with certificate paths"
echo "2. Update .env with fingerprints"
echo "3. Register certificates in Member Directory"
echo ""

View File

@@ -0,0 +1,85 @@
#!/bin/bash
# Grant Database Permissions for dbis user
# Run this on the Proxmox host to grant permissions in the database container
set -e

VMID="${VMID:-10100}"
DB_NAME="${DB_NAME:-dbis_core}"
DB_USER="${DB_USER:-dbis}"

echo "=========================================="
echo "Granting Database Permissions"
echo "=========================================="
echo ""
echo "VMID: $VMID"
echo "Database: $DB_NAME"
echo "User: $DB_USER"
echo ""

# Check if pct command exists (must run on Proxmox host)
if ! command -v pct &> /dev/null; then
    echo "❌ Error: This script must be run on the Proxmox host (pct command not found)"
    echo ""
    echo "Alternative: Run these commands manually:"
    echo " ssh root@192.168.11.10"
    echo " pct exec $VMID -- bash"
    echo " su - postgres -c \"psql -d $DB_NAME\""
    echo ""
    exit 1
fi

# Check if container exists
if ! pct list | grep -q "^\s*$VMID\s"; then
    echo "❌ Error: Container $VMID not found"
    exit 1
fi

echo "Step 1: Granting database-level permissions..."
# BUGFIX: under `set -e` a failing `pct exec` aborted the script before the
# old `if [ $? -ne 0 ]` check could run (it was dead code); failures are now
# caught directly with `if !`.
if ! pct exec "$VMID" -- bash -c "su - postgres -c \"psql -d postgres << 'EOF'
GRANT CONNECT ON DATABASE $DB_NAME TO $DB_USER;
GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;
ALTER USER $DB_USER CREATEDB;
EOF\""; then
    echo "❌ Failed to grant database-level permissions"
    exit 1
fi
echo "✅ Database-level permissions granted"
echo ""

echo "Step 2: Granting schema-level permissions..."
if ! pct exec "$VMID" -- bash -c "su - postgres -c \"psql -d $DB_NAME << 'EOF'
GRANT ALL ON SCHEMA public TO $DB_USER;
GRANT CREATE ON SCHEMA public TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO $DB_USER;
EOF\""; then
    echo "❌ Failed to grant schema-level permissions"
    exit 1
fi
echo "✅ Schema-level permissions granted"
echo ""

echo "Step 3: Verifying permissions..."
# NOTE(review): this check runs as postgres, not as $DB_USER — it proves the
# database is reachable, not that $DB_USER can connect. Advisory only, so it
# must not abort the script under `set -e`.
if pct exec "$VMID" -- bash -c "su - postgres -c \"psql -d $DB_NAME -c 'SELECT current_user, current_database();'\"" > /dev/null 2>&1; then
    echo "✅ Permissions verified - user $DB_USER can connect to $DB_NAME"
else
    echo "⚠️ Verification had issues, but permissions may still be granted"
fi
echo ""

echo "=========================================="
echo "✅ Database permissions granted!"
echo "=========================================="
echo ""
echo "Next step: Run the migration from your local machine:"
echo " cd /home/intlc/projects/proxmox/dbis_core"
echo " ./scripts/run-chart-of-accounts-migration.sh"

View File

@@ -0,0 +1,88 @@
#!/bin/bash
# Grant Database Permissions via SSH to Proxmox Host
set -e

PROXMOX_HOST="${PROXMOX_HOST:-192.168.11.10}"
VMID="${VMID:-10100}"
DB_NAME="${DB_NAME:-dbis_core}"
DB_USER="${DB_USER:-dbis}"

echo "=========================================="
echo "Granting Database Permissions (Remote)"
echo "=========================================="
echo ""
echo "Proxmox Host: $PROXMOX_HOST"
echo "VMID: $VMID"
echo "Database: $DB_NAME"
echo "User: $DB_USER"
echo ""

# Check if we can SSH to Proxmox host (BatchMode: fail instead of prompting)
if ! ssh -o ConnectTimeout=5 -o BatchMode=yes root@"$PROXMOX_HOST" exit 2>/dev/null; then
    echo "⚠️ Cannot SSH to Proxmox host ($PROXMOX_HOST)"
    echo " Please ensure:"
    echo " 1. SSH key is set up for root@$PROXMOX_HOST"
    echo " 2. Host is reachable"
    echo ""
    echo " Or run manually on Proxmox host:"
    echo " pct exec $VMID -- bash -c \"su - postgres -c \\\"psql -d $DB_NAME << 'EOF'"
    echo " GRANT CONNECT ON DATABASE $DB_NAME TO $DB_USER;"
    echo " GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
    echo " ALTER USER $DB_USER CREATEDB;"
    echo " \\\\c $DB_NAME"
    echo " GRANT ALL ON SCHEMA public TO $DB_USER;"
    echo " GRANT CREATE ON SCHEMA public TO $DB_USER;"
    echo " ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO $DB_USER;"
    echo " ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO $DB_USER;"
    echo " EOF\\\"\""
    exit 1
fi
echo "✅ Connected to Proxmox host"
echo ""

echo "Step 1: Granting database-level permissions..."
# BUGFIX: under `set -e` a failing ssh aborted the script before the old
# `if [ $? -ne 0 ]` checks ran (dead code); each remote command is now
# checked directly with `if !`.
if ! ssh root@"$PROXMOX_HOST" "pct exec $VMID -- bash -c \"su - postgres -c \\\"psql -d postgres << 'EOF'
GRANT CONNECT ON DATABASE $DB_NAME TO $DB_USER;
GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;
ALTER USER $DB_USER CREATEDB;
EOF\\\"\""; then
    echo "❌ Failed to grant database-level permissions"
    exit 1
fi
echo "✅ Database-level permissions granted"
echo ""

echo "Step 2: Granting schema-level permissions..."
if ! ssh root@"$PROXMOX_HOST" "pct exec $VMID -- bash -c \"su - postgres -c \\\"psql -d $DB_NAME << 'EOF'
GRANT ALL ON SCHEMA public TO $DB_USER;
GRANT CREATE ON SCHEMA public TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO $DB_USER;
EOF\\\"\""; then
    echo "❌ Failed to grant schema-level permissions"
    exit 1
fi
echo "✅ Schema-level permissions granted"
echo ""

echo "Step 3: Verifying permissions..."
# Verification runs as postgres (not $DB_USER) and is advisory only, so it
# must not abort the script under `set -e`.
if ssh root@"$PROXMOX_HOST" "pct exec $VMID -- bash -c \"su - postgres -c \\\"psql -d $DB_NAME -c 'SELECT current_user, current_database();'\\\"\"" > /dev/null 2>&1; then
    echo "✅ Permissions verified"
else
    echo "⚠️ Verification had issues, but permissions may still be granted"
fi
echo ""

echo "=========================================="
echo "✅ Database permissions granted!"
echo "=========================================="

23
scripts/grant-permissions.sh Executable file
View File

@@ -0,0 +1,23 @@
#!/bin/bash
# Grant Database Permissions - Run on Proxmox Host
# Usage: grant-permissions.sh [VMID] [DB_NAME] [DB_USER]
VMID="${1:-10100}"
DB_NAME="${2:-dbis_core}"
DB_USER="${3:-dbis}"

echo "Granting permissions for $DB_USER on $DB_NAME (VMID: $VMID)..."

# Database-level grants, run against the postgres maintenance database.
# BUGFIX: each pct call is now checked so the final "✅" is only printed when
# both grant steps actually succeeded (previously it printed unconditionally).
if ! pct exec "$VMID" -- bash -c "su - postgres -c \"psql -d postgres << 'EOF'
GRANT CONNECT ON DATABASE $DB_NAME TO $DB_USER;
GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;
ALTER USER $DB_USER CREATEDB;
EOF\""; then
    echo "❌ Failed to grant database-level permissions"
    exit 1
fi

# Schema-level grants and default privileges inside the target database.
if ! pct exec "$VMID" -- bash -c "su - postgres -c \"psql -d $DB_NAME << 'EOF'
GRANT ALL ON SCHEMA public TO $DB_USER;
GRANT CREATE ON SCHEMA public TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO $DB_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO $DB_USER;
EOF\""; then
    echo "❌ Failed to grant schema-level permissions"
    exit 1
fi

echo "✅ Permissions granted!"

View File

@@ -0,0 +1,117 @@
/**
* Initialize Chart of Accounts - Simplified Version
* Direct database access without service layer
*/
import { PrismaClient } from '@prisma/client';
import { v4 as uuidv4 } from 'uuid';
const prisma = new PrismaClient();
const accounts = [
// Assets
{ code: '1000', name: 'ASSETS', category: 'ASSET', level: 1, balance: 'DEBIT', type: 'Asset', usgaap: 'Assets', ifrs: 'Assets', desc: 'Total Assets', system: true },
{ code: '1100', name: 'Current Assets', category: 'ASSET', parent: '1000', level: 2, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Current Assets', ifrs: 'Current Assets', desc: 'Assets expected to be converted to cash within one year', system: true },
{ code: '1110', name: 'Cash and Cash Equivalents', category: 'ASSET', parent: '1100', level: 3, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Cash and Cash Equivalents', ifrs: 'Cash and Cash Equivalents', desc: 'Cash on hand and in banks, short-term investments', system: true },
{ code: '1111', name: 'Cash on Hand', category: 'ASSET', parent: '1110', level: 4, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Cash', ifrs: 'Cash', desc: 'Physical currency and coins', system: false },
{ code: '1112', name: 'Cash in Banks', category: 'ASSET', parent: '1110', level: 4, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Cash', ifrs: 'Cash', desc: 'Deposits in commercial banks', system: false },
{ code: '1120', name: 'Accounts Receivable', category: 'ASSET', parent: '1100', level: 3, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Trade Receivables', ifrs: 'Trade Receivables', desc: 'Amounts owed by customers and counterparties', system: true },
{ code: '1130', name: 'Settlement Assets', category: 'ASSET', parent: '1100', level: 3, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Other Current Assets', ifrs: 'Other Current Assets', desc: 'Assets held for settlement purposes', system: true },
{ code: '1140', name: 'CBDC Holdings', category: 'ASSET', parent: '1100', level: 3, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Digital Assets', ifrs: 'Cryptocurrency Assets', desc: 'Central Bank Digital Currency holdings', system: true },
{ code: '1150', name: 'GRU Holdings', category: 'ASSET', parent: '1100', level: 3, balance: 'DEBIT', type: 'Current Asset', usgaap: 'Digital Assets', ifrs: 'Financial Assets', desc: 'Global Reserve Unit holdings', system: true },
{ code: '1200', name: 'Non-Current Assets', category: 'ASSET', parent: '1000', level: 2, balance: 'DEBIT', type: 'Non-Current Asset', usgaap: 'Non-Current Assets', ifrs: 'Non-Current Assets', desc: 'Long-term assets', system: true },
{ code: '1210', name: 'Property, Plant and Equipment', category: 'ASSET', parent: '1200', level: 3, balance: 'DEBIT', type: 'Non-Current Asset', usgaap: 'Property, Plant and Equipment', ifrs: 'Property, Plant and Equipment', desc: 'Tangible fixed assets', system: true },
// Liabilities
{ code: '2000', name: 'LIABILITIES', category: 'LIABILITY', level: 1, balance: 'CREDIT', type: 'Liability', usgaap: 'Liabilities', ifrs: 'Liabilities', desc: 'Total Liabilities', system: true },
{ code: '2100', name: 'Current Liabilities', category: 'LIABILITY', parent: '2000', level: 2, balance: 'CREDIT', type: 'Current Liability', usgaap: 'Current Liabilities', ifrs: 'Current Liabilities', desc: 'Obligations due within one year', system: true },
{ code: '2110', name: 'Accounts Payable', category: 'LIABILITY', parent: '2100', level: 3, balance: 'CREDIT', type: 'Current Liability', usgaap: 'Accounts Payable', ifrs: 'Trade Payables', desc: 'Amounts owed to suppliers and counterparties', system: true },
{ code: '2140', name: 'CBDC Liabilities', category: 'LIABILITY', parent: '2100', level: 3, balance: 'CREDIT', type: 'Current Liability', usgaap: 'Digital Currency Liabilities', ifrs: 'Financial Liabilities', desc: 'CBDC issued and outstanding', system: true },
{ code: '2150', name: 'GRU Liabilities', category: 'LIABILITY', parent: '2100', level: 3, balance: 'CREDIT', type: 'Current Liability', usgaap: 'Digital Currency Liabilities', ifrs: 'Financial Liabilities', desc: 'GRU issued and outstanding', system: true },
// Equity
{ code: '3000', name: 'EQUITY', category: 'EQUITY', level: 1, balance: 'CREDIT', type: 'Equity', usgaap: 'Equity', ifrs: 'Equity', desc: 'Total Equity', system: true },
{ code: '3100', name: 'Capital', category: 'EQUITY', parent: '3000', level: 2, balance: 'CREDIT', type: 'Equity', usgaap: 'Stockholders Equity', ifrs: 'Share Capital', desc: 'Paid-in capital', system: true },
{ code: '3200', name: 'Retained Earnings', category: 'EQUITY', parent: '3000', level: 2, balance: 'CREDIT', type: 'Equity', usgaap: 'Retained Earnings', ifrs: 'Retained Earnings', desc: 'Accumulated net income', system: true },
// Revenue
{ code: '4000', name: 'REVENUE', category: 'REVENUE', level: 1, balance: 'CREDIT', type: 'Revenue', usgaap: 'Revenue', ifrs: 'Revenue', desc: 'Total Revenue', system: true },
{ code: '4100', name: 'Operating Revenue', category: 'REVENUE', parent: '4000', level: 2, balance: 'CREDIT', type: 'Revenue', usgaap: 'Operating Revenue', ifrs: 'Revenue from Contracts with Customers', desc: 'Revenue from primary operations', system: true },
{ code: '4110', name: 'Interest Income', category: 'REVENUE', parent: '4100', level: 3, balance: 'CREDIT', type: 'Revenue', usgaap: 'Interest Income', ifrs: 'Interest Income', desc: 'Interest earned on loans and investments', system: true },
// Expenses
{ code: '5000', name: 'EXPENSES', category: 'EXPENSE', level: 1, balance: 'DEBIT', type: 'Expense', usgaap: 'Expenses', ifrs: 'Expenses', desc: 'Total Expenses', system: true },
{ code: '5100', name: 'Operating Expenses', category: 'EXPENSE', parent: '5000', level: 2, balance: 'DEBIT', type: 'Expense', usgaap: 'Operating Expenses', ifrs: 'Operating Expenses', desc: 'Expenses from primary operations', system: true },
{ code: '5110', name: 'Interest Expense', category: 'EXPENSE', parent: '5100', level: 3, balance: 'DEBIT', type: 'Expense', usgaap: 'Interest Expense', ifrs: 'Finance Costs', desc: 'Interest paid on borrowings', system: true },
];
/**
 * Seed / refresh the Chart of Accounts from the static `accounts` catalog.
 *
 * Idempotent: each account is upserted by its unique accountCode, so re-runs
 * refresh descriptive fields without creating duplicates.
 *
 * BUG FIX: the original called process.exit(0)/process.exit(1) inside the
 * try/catch. process.exit() terminates the process immediately, so the
 * `finally` block never ran and prisma.$disconnect() was never awaited.
 * We now set process.exitCode and let Node exit naturally after cleanup.
 */
async function initialize() {
  try {
    console.log('Initializing Chart of Accounts...');
    let count = 0;
    for (const acc of accounts) {
      await prisma.chartOfAccount.upsert({
        where: { accountCode: acc.code },
        // Existing rows: refresh descriptive/classification fields and reactivate.
        update: {
          accountName: acc.name,
          category: acc.category,
          parentAccountCode: acc.parent || null,
          level: acc.level,
          normalBalance: acc.balance,
          accountType: acc.type,
          usgaapClassification: acc.usgaap,
          ifrsClassification: acc.ifrs,
          description: acc.desc,
          isActive: true,
        },
        // New rows: additionally assign an id and the system-account flag.
        create: {
          id: uuidv4(),
          accountCode: acc.code,
          accountName: acc.name,
          category: acc.category,
          parentAccountCode: acc.parent || null,
          level: acc.level,
          normalBalance: acc.balance,
          accountType: acc.type,
          usgaapClassification: acc.usgaap,
          ifrsClassification: acc.ifrs,
          description: acc.desc,
          isActive: true,
          isSystemAccount: acc.system,
          metadata: {},
        },
      });
      count++;
    }
    console.log(`✅ Chart of Accounts initialized successfully!`);
    console.log(`✅ Total accounts created: ${count}`);
    // Show a per-category summary of active accounts.
    const summary = await prisma.chartOfAccount.groupBy({
      by: ['category'],
      where: { isActive: true },
      _count: { id: true },
    });
    console.log('\n📊 Account Summary:');
    for (const s of summary) {
      console.log(`  ${s.category}: ${s._count.id}`);
    }
    process.exitCode = 0;
  } catch (error: any) {
    console.error('❌ Error initializing Chart of Accounts:', error.message);
    console.error(error.stack);
    process.exitCode = 1;
  } finally {
    // Guaranteed to run now that process.exit() is no longer called above.
    await prisma.$disconnect();
  }
}
// Run the initializer only when this file is executed directly
// (importing modules call the exported initialize() themselves).
if (require.main === module) {
  initialize();
}
export { initialize };

View File

@@ -0,0 +1,76 @@
/**
* Initialize Chart of Accounts
*
* This script initializes the standard Chart of Accounts
* Run this after the migration has been applied.
*
* Usage:
* ts-node scripts/initialize-chart-of-accounts.ts
* or
* npm run build && node dist/scripts/initialize-chart-of-accounts.js
*/
// Use relative import to avoid path alias issues
import { chartOfAccountsService, AccountCategory } from '../src/core/accounting/chart-of-accounts.service';
// Register tsconfig paths if needed
// NOTE(review): static imports are hoisted above this call, so register()
// executes only after the service import above has already resolved — it
// cannot help that import (hence the relative path). It only enables path
// aliases for modules loaded later (e.g. transitive dynamic requires).
import { register } from 'tsconfig-paths';
import * as path from 'path';
const tsConfig = require('../tsconfig.json');
// Resolve the alias base directory relative to the project root, falling
// back to '.' when tsconfig declares no baseUrl.
const baseUrl = path.resolve(__dirname, '..', tsConfig.compilerOptions.baseUrl || '.');
register({
  baseUrl,
  paths: tsConfig.compilerOptions.paths || {},
});
/**
 * Initialize the standard Chart of Accounts via the accounting service and
 * print a per-category summary.
 *
 * Improvement: the five category queries were issued sequentially; they are
 * independent reads, so they now run concurrently via Promise.all, and the
 * label/category pairs are table-driven instead of five copy-pasted blocks.
 */
async function initializeChartOfAccounts() {
  try {
    console.log('Initializing Chart of Accounts...');
    await chartOfAccountsService.initializeChartOfAccounts();
    console.log('✅ Chart of Accounts initialized successfully!');
    // Verify by getting account count
    const accounts = await chartOfAccountsService.getChartOfAccounts();
    console.log(`✅ Total accounts created: ${accounts.length}`);
    // Label/category pairs for the summary, in display order.
    const categories = [
      ['Assets', AccountCategory.ASSET],
      ['Liabilities', AccountCategory.LIABILITY],
      ['Equity', AccountCategory.EQUITY],
      ['Revenue', AccountCategory.REVENUE],
      ['Expenses', AccountCategory.EXPENSE],
    ] as const;
    // Independent reads — fetch all five categories concurrently.
    const results = await Promise.all(
      categories.map(([, category]) => chartOfAccountsService.getAccountsByCategory(category))
    );
    console.log('\n📊 Account Summary:');
    categories.forEach(([label], i) => {
      console.log(`  ${label}: ${results[i].length}`);
    });
    process.exit(0);
  } catch (error: any) {
    console.error('❌ Error initializing Chart of Accounts:', error.message);
    console.error(error.stack);
    process.exit(1);
  }
}
// Run if called directly
// (when imported as a module, callers invoke initializeChartOfAccounts() themselves)
if (require.main === module) {
  initializeChartOfAccounts();
}
export { initializeChartOfAccounts };

30
scripts/load-test-as4.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/bin/bash
# AS4 Settlement Load Testing Script
# Basic load test using curl.
# Config (env vars): AS4_BASE_URL, AS4_LOAD_CONCURRENT, AS4_LOAD_REQUESTS.
set -e

BASE_URL="${AS4_BASE_URL:-http://localhost:3000}"
CONCURRENT="${AS4_LOAD_CONCURRENT:-10}"
REQUESTS="${AS4_LOAD_REQUESTS:-100}"

echo "========================================="
echo "AS4 Settlement Load Test"
echo "========================================="
echo "Base URL: $BASE_URL"
echo "Concurrent: $CONCURRENT"
echo "Total Requests: $REQUESTS"
echo ""

# Test health endpoint.
# Fire requests as background curls in batches of $CONCURRENT, waiting for
# each batch to drain before starting the next; `time` reports the total.
# FIX: expansions are now quoted ("$REQUESTS", "$BASE_URL") so unexpected
# whitespace in the env vars cannot word-split the commands, and the inner
# `$` is dropped inside $(( )) per arithmetic-expansion convention.
echo "Testing health endpoint..."
time for i in $(seq 1 "$REQUESTS"); do
    curl -s "$BASE_URL/health" > /dev/null &
    if [ $((i % CONCURRENT)) -eq 0 ]; then
        wait
    fi
done
# Drain any remainder batch.
wait
echo ""
echo "Load test complete!"
112
scripts/monitor-outbox.sh Executable file
View File

@@ -0,0 +1,112 @@
#!/bin/bash
# Monitor Dual Ledger Outbox Queue
# Shows queue depth, failed jobs, and processing stats.
# Usage: DATABASE_URL=... ./monitor-outbox.sh   or   ./monitor-outbox.sh <database-url>
set -e

# Resolve connection string: env var first, then first CLI arg, then a local
# development default.
DATABASE_URL="${DATABASE_URL:-${1:-postgresql://user:password@localhost:5432/dbis}}"

# Helper: run one SQL statement against the configured database.
# (Replaces six copies of `psql "$DATABASE_URL" -c`.)
run_query() {
  psql "$DATABASE_URL" -c "$1"
}

echo "=== Dual Ledger Outbox Queue Status ==="
echo ""

# Queue depth by status, ordered by lifecycle stage (not alphabetically).
echo "📊 Queue Depth by Status:"
run_query "
SELECT
  status,
  COUNT(*) as count,
  MIN(created_at) as oldest_job,
  MAX(created_at) as newest_job
FROM dual_ledger_outbox
GROUP BY status
ORDER BY
  CASE status
    WHEN 'QUEUED' THEN 1
    WHEN 'SENT' THEN 2
    WHEN 'ACKED' THEN 3
    WHEN 'FINALIZED' THEN 4
    WHEN 'FAILED' THEN 5
  END;
"
echo ""

# Failed jobs needing attention (most recently attempted first).
echo "⚠️  Failed Jobs (last 10):"
run_query "
SELECT
  outbox_id,
  sovereign_bank_id,
  attempts,
  last_error,
  last_attempt_at,
  created_at
FROM dual_ledger_outbox
WHERE status = 'FAILED'
ORDER BY last_attempt_at DESC
LIMIT 10;
"
echo ""

# Jobs stuck in SENT status (may need manual intervention).
echo "🔍 Jobs Stuck in SENT Status (> 5 minutes):"
run_query "
SELECT
  outbox_id,
  sovereign_bank_id,
  attempts,
  last_attempt_at,
  AGE(now(), last_attempt_at) as stuck_duration
FROM dual_ledger_outbox
WHERE status = 'SENT'
  AND last_attempt_at < now() - INTERVAL '5 minutes'
ORDER BY last_attempt_at ASC
LIMIT 10;
"
echo ""

# Processing rate over the trailing hour.
echo "⚡ Processing Rate (last hour):"
run_query "
SELECT
  status,
  COUNT(*) as count,
  COUNT(*) FILTER (WHERE finalized_at > now() - INTERVAL '1 hour') as last_hour
FROM dual_ledger_outbox
WHERE created_at > now() - INTERVAL '1 hour'
   OR finalized_at > now() - INTERVAL '1 hour'
GROUP BY status
ORDER BY count DESC;
"
echo ""

# Average latency per pipeline stage for jobs finalized in the last 24 hours.
echo "⏱️  Average Processing Times:"
run_query "
SELECT
  AVG(EXTRACT(EPOCH FROM (acked_at - created_at))) as avg_queue_to_ack_seconds,
  AVG(EXTRACT(EPOCH FROM (finalized_at - acked_at))) as avg_ack_to_final_seconds,
  AVG(EXTRACT(EPOCH FROM (finalized_at - created_at))) as avg_total_seconds,
  COUNT(*) as completed_jobs
FROM dual_ledger_outbox
WHERE status = 'FINALIZED'
  AND finalized_at > now() - INTERVAL '24 hours';
"
echo ""

# Per-sovereign-bank breakdown of jobs by status.
echo "🌐 Jobs by Sovereign Bank:"
run_query "
SELECT
  sovereign_bank_id,
  status,
  COUNT(*) as count
FROM dual_ledger_outbox
GROUP BY sovereign_bank_id, status
ORDER BY sovereign_bank_id, status;
"
View File

@@ -0,0 +1,66 @@
// Provision Admin Vault for Sankofa Admin Portal
// Creates the admin vault using the provisioning service
import { adminVaultProvisioningService } from '../src/core/iru/provisioning/admin-vault-provisioning.service';
/**
 * CLI entry point for provisioning the Sankofa admin vault.
 *
 * Flags:
 *   --org <name>     organization name (default: "Sankofa Admin")
 *   --name <name>    vault name        (default: "sankofa-admin")
 *   --level <level>  admin level       (default: "super_admin")
 *
 * FIX: --level was previously cast unchecked into the string-union type, so
 * a typo (e.g. "superadmin") would be passed straight to the provisioning
 * service. It is now validated against the allowed set.
 */
async function main() {
  const args = process.argv.slice(2);
  const VALID_LEVELS = ['super_admin', 'admin', 'operator'] as const;
  type AdminLevel = (typeof VALID_LEVELS)[number];
  // Parse arguments
  let orgName = 'Sankofa Admin';
  let vaultName = 'sankofa-admin';
  let adminLevel: AdminLevel = 'super_admin';
  for (let i = 0; i < args.length; i++) {
    if (args[i] === '--org' && args[i + 1]) {
      orgName = args[i + 1];
      i++;
    } else if (args[i] === '--name' && args[i + 1]) {
      vaultName = args[i + 1];
      i++;
    } else if (args[i] === '--level' && args[i + 1]) {
      const level = args[i + 1];
      if (!(VALID_LEVELS as readonly string[]).includes(level)) {
        console.error(`❌ Invalid --level "${level}". Expected one of: ${VALID_LEVELS.join(', ')}`);
        process.exit(1);
      }
      adminLevel = level as AdminLevel;
      i++;
    }
  }
  console.log('Provisioning Admin Vault...');
  console.log(`  Organization: ${orgName}`);
  console.log(`  Vault Name: ${vaultName}`);
  console.log(`  Admin Level: ${adminLevel}`);
  console.log('');
  try {
    const result = await adminVaultProvisioningService.provisionAdminVault({
      organizationName: orgName,
      vaultName: vaultName,
      adminLevel: adminLevel,
      features: {
        auditLogging: true,
        backupEnabled: true,
        encryptionLevel: 'enhanced',
      },
    });
    console.log('✅ Admin vault provisioned successfully!');
    console.log('');
    console.log('Vault Details:');
    console.log(`  Vault ID: ${result.vaultId}`);
    console.log(`  Vault Path: ${result.vaultPath}`);
    console.log(`  API Endpoint: ${result.apiEndpoint}`);
    console.log(`  Role ID: ${result.roleId}`);
    // NOTE(review): printing the Secret ID to stdout may land in shell
    // history or CI logs — consider writing it to a secure sink instead.
    console.log(`  Secret ID: ${result.secretId}`);
    console.log('');
    console.log('⚠️  IMPORTANT: Store these credentials securely!');
    console.log('');
    console.log('Next steps:');
    console.log('1. Run migration script: ./scripts/migrate-secrets-to-admin-vault.sh');
    console.log('2. Store credentials in secure location');
    console.log('3. Update applications to use admin vault');
  } catch (error) {
    console.error('❌ Failed to provision admin vault:', error);
    process.exit(1);
  }
}
main();

View File

@@ -0,0 +1,83 @@
#!/bin/bash
# Run Chart of Accounts Migration and Initialization
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"

echo "=========================================="
echo "Chart of Accounts Migration & Setup"
echo "=========================================="
echo ""

# Check if DATABASE_URL is set
if [ -z "$DATABASE_URL" ]; then
  echo "⚠️  DATABASE_URL not set. Checking for .env file..."
  if [ -f .env ]; then
    echo "✅ Found .env file, loading environment variables..."
    # BUG FIX: `export $(cat .env | grep -v '^#' | xargs)` mangles values
    # containing spaces, quotes or '='. `set -a` marks every variable the
    # sourced file assigns for export, preserving shell quoting semantics.
    set -a
    # shellcheck disable=SC1091
    . ./.env
    set +a
  else
    echo "❌ Error: DATABASE_URL not set and no .env file found."
    echo ""
    echo "Please set DATABASE_URL or create a .env file with:"
    echo "  DATABASE_URL=postgresql://user:password@host:port/database"
    echo ""
    exit 1
  fi
fi

# Source nvm if available (for Node.js environment)
if [ -f "$HOME/.nvm/nvm.sh" ]; then
  source "$HOME/.nvm/nvm.sh" 2>/dev/null || true
elif [ -f "/root/.nvm/nvm.sh" ]; then
  source "/root/.nvm/nvm.sh" 2>/dev/null || true
fi

# Use local prisma if available, otherwise try npx
PRISMA_CMD=""
if [ -f "./node_modules/.bin/prisma" ]; then
  PRISMA_CMD="./node_modules/.bin/prisma"
elif command -v npx &> /dev/null; then
  PRISMA_CMD="npx prisma"
else
  echo "❌ Error: Prisma not found. Please install dependencies with 'npm install'"
  exit 1
fi

echo "Step 1: Generating Prisma client..."
$PRISMA_CMD generate
echo ""

echo "Step 2: Creating migration..."
$PRISMA_CMD migrate dev --name add_chart_of_accounts
echo ""

echo "Step 3: Initializing Chart of Accounts..."
# Prefer project-local ts-node, then global ts-node, then a prebuilt bundle;
# as a last resort build the TypeScript sources and run the compiled output.
if [ -f "./node_modules/.bin/ts-node" ]; then
  ./node_modules/.bin/ts-node scripts/initialize-chart-of-accounts.ts
elif command -v ts-node &> /dev/null; then
  ts-node scripts/initialize-chart-of-accounts.ts
elif [ -f "dist/scripts/initialize-chart-of-accounts.js" ]; then
  node dist/scripts/initialize-chart-of-accounts.js
else
  echo "⚠️  TypeScript not compiled. Building first..."
  if command -v npm &> /dev/null; then
    npm run build
  else
    echo "❌ Error: npm not found. Cannot build TypeScript."
    exit 1
  fi
  node dist/scripts/initialize-chart-of-accounts.js
fi

echo ""
echo "=========================================="
echo "✅ Chart of Accounts setup complete!"
echo "=========================================="
106
scripts/run-migrations.sh Executable file
View File

@@ -0,0 +1,106 @@
#!/bin/bash
# Run Ledger Correctness Migrations
# Executes all migrations in order with error checking.
# Idempotent: applied migrations are recorded in schema_migrations and skipped
# on subsequent runs.
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
MIGRATIONS_DIR="$PROJECT_ROOT/db/migrations"

# Load database URL from environment or use default
DATABASE_URL="${DATABASE_URL:-${1:-postgresql://user:password@localhost:5432/dbis}}"

echo "=== Running Ledger Correctness Migrations ==="
# SECURITY: redact user:password before echoing the connection string so
# credentials don't leak into terminal scrollback or CI logs.
echo "Database: $(echo "$DATABASE_URL" | sed -E 's#//[^@/]+@#//***@#')"
echo "Migrations directory: $MIGRATIONS_DIR"
echo ""

# Check if migrations directory exists
if [ ! -d "$MIGRATIONS_DIR" ]; then
  echo "❌ Migrations directory not found: $MIGRATIONS_DIR"
  exit 1
fi

# Ensure the tracking table exists so re-runs can skip applied migrations.
psql "$DATABASE_URL" -c "CREATE TABLE IF NOT EXISTS schema_migrations (version text PRIMARY KEY, applied_at timestamptz NOT NULL DEFAULT now());" > /dev/null

# List of migrations in order
MIGRATIONS=(
  "001_ledger_idempotency.sql"
  "002_dual_ledger_outbox.sql"
  "003_outbox_state_machine.sql"
  "004_balance_constraints.sql"
  "005_post_ledger_entry.sql"
)

# Echoes "1" when the migration is recorded in schema_migrations;
# "0"/empty otherwise (including when the query fails).
check_migration_applied() {
  local migration_name=$1
  psql "$DATABASE_URL" -tAc "SELECT 1 FROM schema_migrations WHERE version = '$migration_name'" 2>/dev/null || echo "0"
}

# Run each migration
for migration in "${MIGRATIONS[@]}"; do
  migration_path="$MIGRATIONS_DIR/$migration"
  if [ ! -f "$migration_path" ]; then
    echo "⚠️  Migration file not found: $migration"
    continue
  fi
  # BUG FIX: check_migration_applied was defined but never called, so every
  # run re-executed all migrations (and nothing ever recorded them).
  if [ "$(check_migration_applied "$migration")" = "1" ]; then
    echo "⏭️  Skipping $migration (already applied)"
    continue
  fi
  echo "Running: $migration"
  if psql "$DATABASE_URL" -f "$migration_path"; then
    # Record success so future runs skip this migration.
    psql "$DATABASE_URL" -c "INSERT INTO schema_migrations (version) VALUES ('$migration') ON CONFLICT (version) DO NOTHING;" > /dev/null
    echo "✅ $migration completed successfully"
    echo ""
  else
    echo "❌ $migration failed"
    exit 1
  fi
done

echo "=== All migrations completed successfully ==="

# Verify migrations
echo ""
echo "=== Verifying migrations ==="

# Check idempotency constraint
echo "Checking idempotency constraint..."
psql "$DATABASE_URL" -tAc "
SELECT CASE
  WHEN EXISTS (
    SELECT 1 FROM information_schema.table_constraints
    WHERE constraint_name = 'ledger_entries_unique_ledger_reference'
  ) THEN '✅ Idempotency constraint exists'
  ELSE '❌ Idempotency constraint missing'
END;
"

# Check outbox table
echo "Checking outbox table..."
psql "$DATABASE_URL" -tAc "
SELECT CASE
  WHEN EXISTS (
    SELECT 1 FROM information_schema.tables
    WHERE table_name = 'dual_ledger_outbox'
  ) THEN '✅ Outbox table exists'
  ELSE '❌ Outbox table missing'
END;
"

# Check posting function
echo "Checking posting function..."
psql "$DATABASE_URL" -tAc "
SELECT CASE
  WHEN EXISTS (
    SELECT 1 FROM pg_proc
    WHERE proname = 'post_ledger_entry'
  ) THEN '✅ Posting function exists'
  ELSE '❌ Posting function missing'
END;
"

echo ""
echo "=== Migration verification complete ==="
View File

@@ -0,0 +1,89 @@
// Seed AS4 Settlement Marketplace Offering
// Adds AS4 Settlement Master Service to Sankofa Phoenix Marketplace
import { PrismaClient } from '@prisma/client';
import { v4 as uuidv4 } from 'uuid';
const prisma = new PrismaClient();
/**
 * Seed the AS4 Settlement Master Service offering into the marketplace.
 *
 * FIX: the original used find-then-create, which is race-prone (a concurrent
 * seed run could insert between the check and the create, crashing on the
 * unique constraint). The create is now an atomic upsert with an empty
 * `update`, which preserves the "leave existing row untouched" behavior.
 */
async function main() {
  console.log('Seeding AS4 Settlement Marketplace Offering...');
  // Pre-check kept only for the informative skip message.
  const existing = await prisma.iruOffering.findUnique({
    where: { offeringId: 'AS4-SETTLEMENT-MASTER' },
  });
  if (existing) {
    console.log('AS4 Settlement offering already exists, skipping...');
    return;
  }
  // Atomic create-if-absent; safe even if another process seeds concurrently.
  const offering = await prisma.iruOffering.upsert({
    where: { offeringId: 'AS4-SETTLEMENT-MASTER' },
    update: {},
    create: {
      id: uuidv4(),
      offeringId: 'AS4-SETTLEMENT-MASTER',
      name: 'AS4 Settlement Master Service',
      description:
        'Final settlement institution providing SWIFT-FIN equivalent instruction and confirmation flows (MT202/MT910 semantics) over a custom AS4 gateway, with settlement posting on the DBIS ledger (ChainID 138).',
      capacityTier: 1, // Central Banks and Settlement Banks
      institutionalType: 'SettlementBank',
      pricingModel: 'Hybrid', // Subscription + Usage-based
      basePrice: 10000, // $10,000/month base
      currency: 'USD',
      features: {
        messageTypes: ['DBIS.SI.202', 'DBIS.SI.202COV', 'DBIS.AD.900', 'DBIS.AD.910'],
        capabilities: [
          'AS4 Gateway',
          'Settlement Core',
          'Member Directory',
          'Compliance Gates',
          'Ledger Integration',
          'ChainID 138 Anchoring',
        ],
        supportedCurrencies: ['USD', 'EUR', 'GBP', 'XAU', 'XAG'],
        finality: 'IMMEDIATE',
        availability: '99.9%',
      },
      technicalSpecs: {
        as4Version: 'ebMS3/AS4',
        ledgerMode: 'HYBRID',
        chainId: 138,
        messageFormat: 'JSON',
        signingAlgorithm: 'RSA-SHA256',
        encryptionAlgorithm: 'AES-256-GCM',
        tlsVersion: '1.3',
      },
      legalFramework: {
        rulebook: 'DBIS AS4 Settlement Member Rulebook v1.0',
        compliance: ['AML/CTF', 'Sanctions Screening', 'KYC/KYB'],
        audit: 'IMMUTABLE_WORM_STORAGE',
      },
      regulatoryPosition: {
        status: 'REGULATED',
        jurisdictions: ['GLOBAL'],
        licensing: 'REQUIRED',
      },
      documents: {
        rulebook: '/docs/settlement/as4/MEMBER_RULEBOOK_V1.md',
        pkiModel: '/docs/settlement/as4/PKI_CA_MODEL.md',
        directorySpec: '/docs/settlement/as4/DIRECTORY_SERVICE_SPEC.md',
        threatModel: '/docs/settlement/as4/THREAT_MODEL_CONTROL_CATALOG.md',
      },
      status: 'active',
      displayOrder: 10,
    },
  });
  console.log('AS4 Settlement Marketplace Offering created:', offering.offeringId);
}
main()
  .catch((e) => {
    console.error('Error seeding AS4 Settlement offering:', e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });

View File

@@ -0,0 +1,185 @@
// Seed DBIS Core Banking Private Offering
// Adds DBIS Core Banking System as a private offering in Phoenix Portal
import { PrismaClient } from '@prisma/client';
import { v4 as uuidv4 } from 'uuid';
const prisma = new PrismaClient();
/**
 * Seed (or refresh) the DBIS Core Banking private offering.
 *
 * FIX: the original duplicated the entire ~80-line payload between the
 * update and create branches — a guaranteed source of drift. The payload is
 * now defined once and applied through an atomic upsert (the find-then-write
 * pair was also race-prone under concurrent seed runs).
 */
async function main() {
  console.log('Seeding DBIS Core Banking private offering...');
  const offeringId = 'DBIS-CORE-BANKING-PRIVATE';
  // Single source of truth for the offering payload, shared by both the
  // create and update paths of the upsert below.
  const offeringData = {
    name: 'DBIS Core Banking System',
    description: 'Sovereign-grade financial infrastructure for the Digital Bank of International Settlements and 33 Sovereign Central Banks. Complete core banking system with neural consensus engine, global quantum ledger, autonomous regulatory intelligence, and sovereign AI risk engine.',
    capacityTier: 1, // Tier 1: Central Banks only
    institutionalType: 'central_bank', // Private offering for central banks
    pricingModel: 'private', // Private offering, not publicly listed
    basePrice: null, // Pricing negotiated privately
    currency: 'USD',
    features: {
      coreBanking: true,
      neuralConsensusEngine: true,
      globalQuantumLedger: true,
      autonomousRegulatoryIntelligence: true,
      sovereignAIRiskEngine: true,
      sovereignSettlementNodes: true,
      cbdcSystem: true,
      globalSettlementSystem: true,
      instantSettlementNetwork: true,
      fxEngine: true,
      complianceAML: true,
      treasuryManagement: true,
      identityGraph: true,
      quantumResistant: true,
      multiAssetSupport: true,
      crossBorderSettlement: true,
      iso20022: true,
      hsmIntegration: true,
    },
    technicalSpecs: {
      systemType: 'Core Banking System',
      architecture: 'Sovereign-grade financial infrastructure',
      targetUsers: '33 Sovereign Central Banks',
      consensus: 'Neural Consensus Engine (NCE)',
      ledger: 'Global Quantum Ledger (GQL)',
      security: 'Quantum-resistant with XMSS/SPHINCS+ signatures',
      compliance: 'Autonomous Regulatory Intelligence (ARI)',
      riskManagement: 'Sovereign AI Risk Engine (SARE)',
      settlement: 'Global Settlement System (GSS)',
      payments: 'Instant Settlement Network (ISN)',
      cbdc: 'Full CBDC issuance and management',
      integration: 'ISO 20022, SWIFT, ACH, HSM',
      scalability: 'Multi-sovereign, multi-asset',
      availability: '99.99% uptime SLA',
    },
    legalFramework: {
      serviceAgreement: 'DBIS IRU Participation Agreement',
      dataProcessing: 'Sovereign-grade data protection',
      jurisdiction: 'Multi-sovereign',
      compliance: 'Regulatory compliance per jurisdiction',
    },
    regulatoryPosition: {
      compliance: 'Full regulatory compliance framework',
      certifications: ['ISO 27001', 'SOC 2 Type II', 'PCI DSS'],
      dataResidency: 'Sovereign-specific',
      regulatoryIntelligence: 'Autonomous regulatory compliance',
    },
    documents: {
      serviceAgreement: '/documents/dbis-core-banking-agreement.pdf',
      technicalDocumentation: '/documents/dbis-architecture-atlas.pdf',
      apiDocumentation: '/documents/dbis-api-guide.pdf',
      integrationGuide: '/documents/dbis-integration-guide.pdf',
      legalFramework: '/legal/README.md',
    },
    status: 'active',
    displayOrder: 1, // Top priority for private offerings
  };
  // Pre-check only to reproduce the original create/update log messages.
  const existingOffering = await prisma.iruOffering.findUnique({
    where: { offeringId },
  });
  if (existingOffering) {
    console.log(`Offering ${offeringId} already exists. Updating...`);
  }
  await prisma.iruOffering.upsert({
    where: { offeringId },
    update: { ...offeringData, updatedAt: new Date() },
    create: {
      id: uuidv4(),
      offeringId,
      ...offeringData,
      createdAt: new Date(),
      updatedAt: new Date(),
    },
  });
  console.log(
    existingOffering
      ? `✅ Offering ${offeringId} updated successfully`
      : `✅ Offering ${offeringId} created successfully`
  );
  console.log('\n📋 Offering Details:');
  console.log(`  Offering ID: ${offeringId}`);
  console.log(`  Name: DBIS Core Banking System`);
  console.log(`  Type: Private Offering (Central Banks Only)`);
  console.log(`  Capacity Tier: 1 (Central Banks)`);
  console.log(`  Status: Active`);
  console.log(`  Display Order: 1 (Top Priority)`);
}
main()
  .catch((e) => {
    console.error('Error seeding DBIS Core Banking offering:', e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });

View File

@@ -0,0 +1,58 @@
import { capabilityRegistryService } from '@/core/solacenet/registry/capability-registry.service';
import { logger } from '@/infrastructure/monitoring/logger';
/**
 * Register the DBIS Gateway Microservices capability and its four planes.
 *
 * BUG FIX: previously, if the PARENT capability already existed, the outer
 * catch intercepted the "already exists" error and returned early — so the
 * sub-capabilities were never registered on re-runs. The parent's
 * already-exists case is now handled inline, mirroring the sub-capability
 * handling, so the loop always runs.
 */
async function main() {
  try {
    logger.info('Registering DBIS Gateway Microservices capability...');
    try {
      await capabilityRegistryService.createCapability({
        capabilityId: 'gateway-microservices',
        name: 'DBIS Gateway Microservices',
        version: '1.0.0',
        description:
          'Regulated-grade integration fabric for SWIFT, DTC/DTCC, and extensible financial rails',
        defaultState: 'enabled' as any,
        // (dropped the no-op `.filter(Boolean)` on this literal array)
        dependencies: ['ledger', 'iso20022', 'reconciliation'],
      });
    } catch (err: any) {
      if (err?.message?.includes('already exists')) {
        logger.info('Capability gateway-microservices already exists, skipping');
      } else {
        throw err;
      }
    }
    // The four gateway planes all depend on the parent capability.
    const subCapabilities = [
      { capabilityId: 'gateway-edge', name: 'Gateway Edge Plane' },
      { capabilityId: 'gateway-control', name: 'Gateway Control Plane' },
      { capabilityId: 'gateway-operations', name: 'Gateway Operations Plane' },
      { capabilityId: 'gateway-adapters', name: 'Gateway Adapter Plane' },
    ];
    for (const sub of subCapabilities) {
      try {
        await capabilityRegistryService.createCapability({
          capabilityId: sub.capabilityId,
          name: sub.name,
          version: '1.0.0',
          description: sub.name,
          defaultState: 'enabled' as any,
          dependencies: ['gateway-microservices'],
        });
      } catch (err: any) {
        if (err?.message?.includes('already exists')) {
          logger.info(`Capability ${sub.capabilityId} already exists, skipping`);
        } else {
          throw err;
        }
      }
    }
    logger.info('Gateway capabilities registered.');
  } catch (error: any) {
    logger.error('Failed to register gateway capability', { error: error?.message || error });
    process.exitCode = 1;
  }
}
if (require.main === module) {
  // eslint-disable-next-line @typescript-eslint/no-floating-promises
  main();
}

225
scripts/seed-solacenet.ts Normal file
View File

@@ -0,0 +1,225 @@
// SolaceNet Seed Script
// Populates initial capability catalog
import { capabilityRegistryService } from '../src/core/solacenet/registry/capability-registry.service';
import { logger } from '../src/infrastructure/monitoring/logger';
async function seedCapabilities() {
logger.info('Starting SolaceNet capability seed...');
const capabilities = [
// Core Platform Primitives
{
capabilityId: 'tenant-service',
name: 'Tenant Service',
description: 'Tenant, program, and environment management',
defaultState: 'enabled',
},
{
capabilityId: 'iam-auth',
name: 'IAM/Auth Service',
description: 'OIDC, RBAC/ABAC, service-to-service authentication',
defaultState: 'enabled',
},
{
capabilityId: 'kyc-kyb',
name: 'KYC/KYB Orchestration',
description: 'Workflow, document intake, verification',
defaultState: 'enabled',
},
{
capabilityId: 'aml-monitoring',
name: 'AML Monitoring',
description: 'Transaction monitoring rules and scoring',
defaultState: 'enabled',
},
{
capabilityId: 'ledger',
name: 'Ledger Service',
description: 'Double-entry, immutable journal',
defaultState: 'enabled',
},
{
capabilityId: 'limits-velocity',
name: 'Limits & Velocity Controls',
description: 'Per user/account/merchant limits',
defaultState: 'enabled',
},
{
capabilityId: 'fees-pricing',
name: 'Fees & Pricing Engine',
description: 'Fee schedules, interchange sharing, tiering',
defaultState: 'enabled',
},
{
capabilityId: 'risk-rules',
name: 'Risk Rules Engine',
description: 'Configurable fraud detection rules',
defaultState: 'enabled',
},
// Payments & Merchant
{
capabilityId: 'payment-gateway',
name: 'Payment Gateway',
description: 'Pay-in intents, captures, refunds',
defaultState: 'disabled',
dependencies: ['ledger', 'limits-velocity', 'fees-pricing'],
},
{
capabilityId: 'merchant-onboarding',
name: 'Merchant Onboarding',
description: 'KYB + underwriting',
defaultState: 'disabled',
dependencies: ['kyc-kyb'],
},
{
capabilityId: 'merchant-processing',
name: 'Merchant Processing',
description: 'Authorization/capture/refund',
defaultState: 'disabled',
dependencies: ['payment-gateway'],
},
{
capabilityId: 'disputes',
name: 'Disputes/Chargebacks',
description: 'Representment workflows',
defaultState: 'disabled',
dependencies: ['merchant-processing'],
},
{
capabilityId: 'payouts',
name: 'Payouts',
description: 'Bank payout, push-to-card payout',
defaultState: 'disabled',
dependencies: ['payment-gateway', 'ledger'],
},
// Cards
{
capabilityId: 'card-issuing',
name: 'Card Issuing',
description: 'Virtual/physical card issuance',
defaultState: 'disabled',
dependencies: ['ledger', 'risk-rules'],
},
{
capabilityId: 'card-lifecycle',
name: 'Card Lifecycle',
description: 'Create/activate/PIN/replace',
defaultState: 'disabled',
dependencies: ['card-issuing'],
},
{
capabilityId: 'card-controls',
name: 'Card Controls',
description: 'Freeze, limits, MCC/merchant category blocks',
defaultState: 'disabled',
dependencies: ['card-issuing'],
},
{
capabilityId: 'authorization-decisioning',
name: 'Authorization Decisioning',
description: 'Real-time rules + risk',
defaultState: 'disabled',
dependencies: ['card-issuing', 'risk-rules'],
},
// Wallets & Transfers
{
capabilityId: 'wallet-accounts',
name: 'Wallet Accounts',
description: 'Stored value with sub-ledgers',
defaultState: 'disabled',
dependencies: ['ledger'],
},
{
capabilityId: 'p2p-transfers',
name: 'P2P Transfers',
description: 'Internal transfers',
defaultState: 'disabled',
dependencies: ['wallet-accounts'],
},
{
capabilityId: 'bank-transfers',
name: 'Bank Transfers',
description: 'ACH/SEPA/FPS via connector',
defaultState: 'disabled',
dependencies: ['wallet-accounts'],
},
{
capabilityId: 'account-funding',
name: 'Account Funding',
description: 'Bank transfer, card, cash',
defaultState: 'disabled',
dependencies: ['wallet-accounts'],
},
// Mobile Money
{
capabilityId: 'mobile-money-connector',
name: 'Mobile Money Connector',
description: 'Abstraction layer for mobile money providers',
defaultState: 'disabled',
dependencies: ['ledger'],
},
{
capabilityId: 'mobile-money-cash-in',
name: 'Mobile Money Cash-In',
description: 'Cash-in orchestration',
defaultState: 'disabled',
dependencies: ['mobile-money-connector'],
},
{
capabilityId: 'mobile-money-cash-out',
name: 'Mobile Money Cash-Out',
description: 'Cash-out orchestration',
defaultState: 'disabled',
dependencies: ['mobile-money-connector'],
},
{
capabilityId: 'mobile-money-transfers',
name: 'Mobile Money Transfers',
description: 'Domestic transfers',
defaultState: 'disabled',
dependencies: ['mobile-money-connector'],
},
];
for (const cap of capabilities) {
try {
await capabilityRegistryService.createCapability({
capabilityId: cap.capabilityId,
name: cap.name,
version: '1.0.0',
description: cap.description,
defaultState: cap.defaultState as any,
dependencies: cap.dependencies || [],
});
logger.info(`Created capability: ${cap.capabilityId}`);
} catch (error: any) {
if (error.message?.includes('already exists')) {
logger.info(`Capability ${cap.capabilityId} already exists, skipping`);
} else {
logger.error(`Failed to create capability ${cap.capabilityId}`, { error });
}
}
}
logger.info('SolaceNet capability seed completed!');
}
// Entry point: execute the seed only when this module is run directly,
// not when it is imported by another module.
if (require.main === module) {
  void (async () => {
    try {
      await seedCapabilities();
      logger.info('Seed script completed successfully');
      process.exit(0);
    } catch (error) {
      logger.error('Seed script failed', { error });
      process.exit(1);
    }
  })();
}

export { seedCapabilities };

View File

@@ -0,0 +1,159 @@
// Seed Vault Marketplace Offering
// Adds Vault service to Sankofa Phoenix Marketplace
import { PrismaClient } from '@prisma/client';
import { v4 as uuidv4 } from 'uuid';
// Shared Prisma client for this script; disconnected by the runner at the bottom of the file.
const prisma = new PrismaClient();
/**
 * Seed (create or refresh) the Virtual Vault Service marketplace offering.
 *
 * The entire offering payload was previously duplicated verbatim in the
 * update and create branches (~60 lines each), letting the copies drift;
 * it is now defined exactly once and written atomically with `upsert`,
 * which also removes the SELECT-then-INSERT race of the old two-step flow.
 */
async function main() {
  console.log('Seeding Vault marketplace offering...');

  const offeringId = 'VAULT-VIRTUAL-VAULT';

  // Fields shared by both the create and update paths.
  const offeringData = {
    name: 'Virtual Vault Service',
    description: 'Enterprise-grade secrets management with HashiCorp Vault. Create isolated virtual vaults on our high-availability Vault cluster for secure storage and management of secrets, API keys, certificates, and sensitive configuration data.',
    capacityTier: 0, // Available to all tiers
    institutionalType: 'all',
    pricingModel: 'subscription',
    basePrice: 500.00, // Monthly base price
    currency: 'USD',
    features: {
      secretsManagement: true,
      encryptionAtRest: true,
      encryptionInTransit: true,
      highAvailability: true,
      automaticBackups: true,
      auditLogging: true,
      apiAccess: true,
      cliAccess: true,
      sdkSupport: ['nodejs', 'python', 'java', 'go', 'dotnet'],
      integrations: ['kubernetes', 'terraform', 'ansible', 'jenkins'],
    },
    technicalSpecs: {
      vaultVersion: '1.21.2',
      clusterType: 'Raft HA',
      nodeCount: 3,
      redundancy: 'Full',
      storageBackend: 'Raft',
      // NOTE(review): these are internal, plaintext-HTTP addresses; confirm
      // they are intended to be published in a marketplace offering record.
      apiEndpoints: [
        'http://192.168.11.200:8200',
        'http://192.168.11.215:8200',
        'http://192.168.11.202:8200',
      ],
      authentication: ['AppRole', 'Token', 'LDAP', 'OIDC'],
      encryption: 'AES-256-GCM',
      compliance: ['SOC 2', 'ISO 27001', 'GDPR'],
      sla: '99.9%',
      backupFrequency: 'Daily',
      retention: '30 days',
    },
    legalFramework: {
      serviceAgreement: 'Virtual Vault Service Agreement',
      dataProcessing: 'GDPR Compliant',
      jurisdiction: 'International',
    },
    regulatoryPosition: {
      compliance: 'Enterprise-grade security and compliance',
      certifications: ['SOC 2', 'ISO 27001'],
      dataResidency: 'Configurable',
    },
    documents: {
      serviceAgreement: '/documents/vault-service-agreement.pdf',
      technicalDocumentation: '/documents/vault-technical-specs.pdf',
      apiDocumentation: '/documents/vault-api-docs.pdf',
      integrationGuide: '/documents/vault-integration-guide.pdf',
    },
    status: 'active',
    displayOrder: 10,
  };

  // Lookup is only for reporting which path this run takes; the write below
  // is atomic regardless.
  const existingOffering = await prisma.iruOffering.findUnique({
    where: { offeringId },
  });
  if (existingOffering) {
    console.log(`Offering ${offeringId} already exists. Updating...`);
  }

  await prisma.iruOffering.upsert({
    where: { offeringId },
    update: {
      ...offeringData,
      updatedAt: new Date(),
    },
    create: {
      id: uuidv4(),
      offeringId,
      ...offeringData,
      createdAt: new Date(),
      updatedAt: new Date(),
    },
  });

  if (existingOffering) {
    console.log(`✅ Offering ${offeringId} updated successfully`);
  } else {
    console.log(`✅ Offering ${offeringId} created successfully`);
  }
}
// Run the seed, disconnecting Prisma before the process ends.
// BUG FIX: the previous `.finally(async () => prisma.$disconnect())` never ran
// on failure, because `.catch` called process.exit(1), which terminates the
// process immediately. Disconnect must happen *before* exiting.
main()
  .then(async () => {
    await prisma.$disconnect();
  })
  .catch(async (e) => {
    console.error('Error seeding Vault offering:', e);
    await prisma.$disconnect();
    process.exit(1);
  });

97
scripts/setup-as4-complete.sh Executable file
View File

@@ -0,0 +1,97 @@
#!/bin/bash
# Complete AS4 Settlement Setup Script
# Automates all setup steps that can be done without database
set -e

# BUG FIX: enable recursive globbing. Without globstar, bash treats '**' as
# plain '*', so the TypeScript check in Step 6 silently missed files in
# nested directories.
shopt -s globstar

echo "========================================="
echo "AS4 Settlement Complete Setup"
echo "========================================="

# Run from the repository root regardless of where the script was invoked.
cd "$(dirname "$0")/.."

# Step 1: Verify prerequisites
echo ""
echo "Step 1: Verifying prerequisites..."
./scripts/verify-as4-setup.sh || {
    echo "Prerequisites check failed. Please fix errors and try again."
    exit 1
}

# Step 2: Generate certificates (idempotent: skipped when already present)
echo ""
echo "Step 2: Generating certificates..."
if [ ! -f "certs/as4/as4-tls-cert.pem" ]; then
    ./scripts/generate-as4-certificates.sh
else
    echo "Certificates already exist, skipping..."
fi

# Step 3: Update .env file
echo ""
echo "Step 3: Updating .env file..."
if [ ! -f ".env" ]; then
    echo "Creating .env from .env.as4.example..."
    cp .env.as4.example .env
    echo "⚠ Please edit .env and configure all values"
else
    echo ".env file exists, checking for AS4 variables..."
    # Append the AS4 variables only once; re-running must not duplicate them.
    if ! grep -q "AS4_BASE_URL" .env; then
        echo "Adding AS4 variables to .env..."
        cat .env.as4.example >> .env
        echo "⚠ Please review and configure AS4 variables in .env"
    else
        echo "✓ AS4 variables already in .env"
    fi
fi

# Step 4: Install dependencies
echo ""
echo "Step 4: Installing dependencies..."
npm install ajv ajv-formats --save

# Step 5: Generate Prisma client
echo ""
echo "Step 5: Generating Prisma client..."
npx prisma generate

# Step 6: Verify TypeScript compilation (non-fatal: path-resolution noise is
# expected when compiling files in isolation). Relies on globstar set above.
echo ""
echo "Step 6: Verifying TypeScript compilation..."
if npx tsc --noEmit src/core/settlement/as4/**/*.ts src/core/settlement/as4-settlement/**/*.ts 2>&1 | grep -q "error TS"; then
    echo "⚠ TypeScript compilation has errors (may be path resolution issues)"
else
    echo "✓ TypeScript compilation check passed"
fi

# Step 7: Verify routes
echo ""
echo "Step 7: Verifying route registration..."
if grep -q "as4GatewayRoutes" src/integration/api-gateway/app.ts; then
    echo "✓ Routes registered"
else
    echo "✗ Routes not registered"
    exit 1
fi

# Step 8: Create necessary directories
echo ""
echo "Step 8: Creating directories..."
mkdir -p logs/as4
mkdir -p data/as4/vault
mkdir -p certs/as4
echo "✓ Directories created"

# Summary
echo ""
echo "========================================="
echo "Setup Complete!"
echo "========================================="
echo ""
echo "Next steps (require database):"
echo "1. Ensure database is running and accessible"
echo "2. Run migration: npx prisma migrate deploy"
echo "3. Seed marketplace: npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts"
echo "4. Start server: npm run dev"
echo "5. Test endpoints: curl http://localhost:3000/health"
echo ""

View File

@@ -0,0 +1,165 @@
#!/bin/bash
# Setup Local Development Environment for AS4 Settlement
# Sets up Docker Compose and local configuration
set -e
echo "========================================="
echo "AS4 Settlement Local Development Setup"
echo "========================================="
echo ""
# Run from the repository root regardless of the caller's cwd.
cd "$(dirname "$0")/.."

# Step 1: Check Docker
echo "Step 1: Checking Docker..."
if command -v docker &> /dev/null; then
    DOCKER_VERSION=$(docker --version)
    echo " ✓ Docker installed: $DOCKER_VERSION"
else
    echo " ✗ Docker not found. Please install Docker first."
    exit 1
fi
if command -v docker-compose &> /dev/null || docker compose version &> /dev/null; then
    echo " ✓ Docker Compose available"
else
    echo " ✗ Docker Compose not found. Please install Docker Compose first."
    exit 1
fi
echo ""

# Step 2: Setup environment for local development
echo "Step 2: Setting up environment..."
if [ ! -f .env ]; then
    echo " ⚠ .env not found. Creating basic .env..."
    cat > .env <<EOF
# Database (Docker Compose)
DATABASE_URL=postgresql://dbis_user:dbis_password@localhost:5432/dbis_core
# Redis (Docker Compose)
REDIS_URL=redis://localhost:6379
# AS4 Configuration
AS4_BASE_URL=http://localhost:3000
AS4_LOG_LEVEL=debug
NODE_ENV=development
EOF
    echo " ✓ .env file created"
else
    echo " ✓ .env already exists"
    # Rewrite a remote DATABASE_URL so local runs hit the Compose database.
    if grep -q "192.168.11.105" .env; then
        echo " ⚠ Updating DATABASE_URL for local Docker Compose..."
        sed -i.bak 's|DATABASE_URL=.*|DATABASE_URL=postgresql://dbis_user:dbis_password@localhost:5432/dbis_core|' .env
        echo " ✓ DATABASE_URL updated for local development"
    fi
fi
echo ""

# Step 3: Generate certificates if needed
echo "Step 3: Generating certificates..."
if [ ! -f "certs/as4/as4-tls-cert.pem" ]; then
    ./scripts/generate-as4-certificates.sh
else
    echo " ✓ Certificates already exist"
fi
echo ""

# Step 4: Create necessary directories
echo "Step 4: Creating directories..."
mkdir -p logs/as4
mkdir -p data/as4/vault
mkdir -p certs/as4
echo " ✓ Directories created"
echo ""

# Step 5: Start Docker Compose services
echo "Step 5: Starting Docker Compose services..."
# Prefer the v2 'docker compose' plugin; fall back to the legacy v1 binary.
if docker compose version &> /dev/null; then
    DOCKER_COMPOSE_CMD="docker compose"
else
    DOCKER_COMPOSE_CMD="docker-compose"
fi
cd docker
if $DOCKER_COMPOSE_CMD -f docker-compose.as4.yml up -d postgres redis; then
    echo " ✓ Docker services started"
    echo " Waiting for services to be ready..."
    sleep 5
    # BUG FIX: the readiness checks below previously hardcoded 'docker compose',
    # ignoring the $DOCKER_COMPOSE_CMD fallback detected above and breaking on
    # hosts that only have the legacy docker-compose v1 binary.
    # Wait for PostgreSQL
    echo " Waiting for PostgreSQL..."
    for i in {1..30}; do
        if $DOCKER_COMPOSE_CMD -f docker-compose.as4.yml exec -T postgres pg_isready -U dbis_user &> /dev/null; then
            echo " ✓ PostgreSQL is ready"
            break
        fi
        if [ $i -eq 30 ]; then
            echo " ⚠ PostgreSQL may not be ready yet"
        fi
        sleep 1
    done
    # Wait for Redis
    echo " Waiting for Redis..."
    for i in {1..30}; do
        if $DOCKER_COMPOSE_CMD -f docker-compose.as4.yml exec -T redis redis-cli ping &> /dev/null; then
            echo " ✓ Redis is ready"
            break
        fi
        if [ $i -eq 30 ]; then
            echo " ⚠ Redis may not be ready yet"
        fi
        sleep 1
    done
else
    echo " ✗ Failed to start Docker services"
    exit 1
fi
cd ..
echo ""

# Step 6: Generate Prisma client
echo "Step 6: Generating Prisma client..."
npx prisma generate
echo " ✓ Prisma client generated"
echo ""

# Step 7: Run database migration (non-fatal: DB may still be warming up)
echo "Step 7: Running database migration..."
if npx prisma migrate deploy; then
    echo " ✓ Migration successful"
else
    echo " ⚠ Migration failed - database may not be ready yet"
    echo " Run manually: npx prisma migrate deploy"
fi
echo ""

# Step 8: Seed marketplace (non-fatal, same reason as Step 7)
echo "Step 8: Seeding marketplace offering..."
if npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts; then
    echo " ✓ Marketplace seeded"
else
    echo " ⚠ Seeding failed - database may not be ready yet"
    echo " Run manually: npx ts-node scripts/seed-as4-settlement-marketplace-offering.ts"
fi
echo ""

# Summary
echo "========================================="
echo "Local Development Setup Complete!"
echo "========================================="
echo ""
echo "Services running:"
echo " - PostgreSQL: localhost:5432"
echo " - Redis: localhost:6379"
echo ""
echo "Next steps:"
echo " 1. Verify setup: ./scripts/check-database-status.sh"
echo " 2. Start server: npm run dev"
echo " 3. Test endpoints: ./scripts/test-as4-api.sh"
echo ""
echo "To stop services:"
echo " cd docker && docker compose -f docker-compose.as4.yml down"
echo ""

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# Submit Test Instruction to AS4 Settlement
# Creates and submits a test settlement instruction
set -e

BASE_URL="${AS4_BASE_URL:-http://localhost:3000}"
AUTH_TOKEN="${AS4_AUTH_TOKEN:-}"
# Positional parameters with defaults: member id, amount, currency.
FROM_MEMBER="${1:-TEST-MEMBER-001}"
AMOUNT="${2:-1000.00}"
CURRENCY="${3:-USD}"
# Unique-per-run identifiers derived from the epoch second.
INSTR_ID="INSTR-TEST-$(date +%s)"
MSG_ID="MSG-TEST-$(date +%s)"
CORR_ID="CORR-$(date +%s)"
NONCE="nonce-$(date +%s)"
echo "Submitting test instruction: $INSTR_ID"

# Calculate payload hash (simplified).
# PORTABILITY FIX: sha256sum is coreutils-only; macOS/BSD ship 'shasum -a 256'.
if command -v sha256sum &> /dev/null; then
    SHA256_CMD="sha256sum"
else
    SHA256_CMD="shasum -a 256"
fi
PAYLOAD_HASH=$(echo -n "$INSTR_ID$FROM_MEMBER$AMOUNT" | $SHA256_CMD | cut -d' ' -f1 | tr '[:lower:]' '[:upper:]')

# Submit the instruction; the Authorization header is added only when a token is set.
curl -X POST "$BASE_URL/api/v1/as4/settlement/instructions" \
  -H "Content-Type: application/json" \
  ${AUTH_TOKEN:+-H "Authorization: Bearer $AUTH_TOKEN"} \
  -d "{
    \"fromMemberId\": \"$FROM_MEMBER\",
    \"payloadHash\": \"$PAYLOAD_HASH\",
    \"signatureEvidence\": {},
    \"as4ReceiptEvidence\": {},
    \"message\": {
      \"MessageId\": \"$MSG_ID\",
      \"BusinessType\": \"DBIS.SI.202\",
      \"CreatedAt\": \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\",
      \"FromMemberId\": \"$FROM_MEMBER\",
      \"ToMemberId\": \"DBIS\",
      \"CorrelationId\": \"$CORR_ID\",
      \"ReplayNonce\": \"$NONCE\",
      \"SchemaVersion\": \"1.0\",
      \"Instr\": {
        \"InstrId\": \"$INSTR_ID\",
        \"ValueDate\": \"$(date -u +%Y-%m-%d)\",
        \"Currency\": \"$CURRENCY\",
        \"Amount\": \"$AMOUNT\",
        \"DebtorAccount\": \"MSA:$FROM_MEMBER:$CURRENCY\",
        \"CreditorAccount\": \"MSA:TEST-MEMBER-002:$CURRENCY\",
        \"Charges\": \"SHA\",
        \"PurposeCode\": \"SETT\"
      }
    }
  }" | jq '.'

echo ""
echo "Test instruction submitted: $INSTR_ID"

95
scripts/test-as4-api.sh Executable file
View File

@@ -0,0 +1,95 @@
#!/bin/bash
# AS4 Settlement API Testing Script
# Tests all AS4 API endpoints
set -e
BASE_URL="${AS4_BASE_URL:-http://localhost:3000}"
AUTH_TOKEN="${AS4_AUTH_TOKEN:-}"
echo "========================================="
echo "AS4 Settlement API Testing"
echo "========================================="
echo "Base URL: $BASE_URL"
echo ""
# Colors for output
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
PASSED=0
FAILED=0

# test_endpoint METHOD ENDPOINT DATA EXPECTED_STATUS DESCRIPTION
# Issues the request, compares the HTTP status to EXPECTED_STATUS, and tallies
# the result. Returns 0 on pass, 1 on fail.
test_endpoint() {
    local method=$1
    local endpoint=$2
    local data=$3
    local expected_status=$4
    local description=$5
    echo -n "Testing: $description... "
    if [ -n "$data" ]; then
        response=$(curl -s -w "\n%{http_code}" -X "$method" \
            "$BASE_URL$endpoint" \
            -H "Content-Type: application/json" \
            ${AUTH_TOKEN:+-H "Authorization: Bearer $AUTH_TOKEN"} \
            -d "$data" 2>&1)
    else
        response=$(curl -s -w "\n%{http_code}" -X "$method" \
            "$BASE_URL$endpoint" \
            ${AUTH_TOKEN:+-H "Authorization: Bearer $AUTH_TOKEN"} 2>&1)
    fi
    # Last line of the response is the status code; everything before is the body.
    http_code=$(echo "$response" | tail -n1)
    body=$(echo "$response" | sed '$d')
    if [ "$http_code" = "$expected_status" ]; then
        echo -e "${GREEN}✓ PASSED${NC} (HTTP $http_code)"
        # BUG FIX: plain assignment instead of ((PASSED++)). Under 'set -e',
        # post-incrementing from 0 makes the (( )) command exit with status 1
        # and kills the script on the very first passing test.
        PASSED=$((PASSED + 1))
        return 0
    else
        echo -e "${RED}✗ FAILED${NC} (Expected HTTP $expected_status, got $http_code)"
        echo "  Response: $body"
        FAILED=$((FAILED + 1))
        return 1
    fi
}

# BUG FIX: each invocation is guarded with '|| true' so a failing endpoint is
# tallied instead of aborting the whole run via 'set -e' (test_endpoint
# returns 1 on failure).
# Test 1: Health Check
test_endpoint "GET" "/health" "" "200" "Health Check" || true
# Test 2: AS4 Metrics
test_endpoint "GET" "/api/v1/as4/metrics" "" "200" "Prometheus Metrics" || true
# Test 3: AS4 Health Metrics
test_endpoint "GET" "/api/v1/as4/metrics/health" "" "200" "Health Metrics" || true
# Test 4: Member Directory - Search (may fail if no members)
test_endpoint "GET" "/api/v1/as4/directory/members?status=active" "" "200" "Search Members" || true
# Test 5: Certificate Expiration Warnings
test_endpoint "GET" "/api/v1/as4/directory/certificates/expiration-warnings" "" "200" "Certificate Warnings" || true

# Summary
echo ""
echo "========================================="
echo "Test Summary"
echo "========================================="
echo -e "${GREEN}Passed: $PASSED${NC}"
if [ $FAILED -gt 0 ]; then
    echo -e "${RED}Failed: $FAILED${NC}"
else
    echo -e "${GREEN}Failed: $FAILED${NC}"
fi
echo ""
if [ $FAILED -eq 0 ]; then
    echo "✓ All API tests passed!"
    exit 0
else
    echo "✗ Some API tests failed"
    exit 1
fi

93
scripts/test-as4-settlement.sh Executable file
View File

@@ -0,0 +1,93 @@
#!/bin/bash
# AS4 Settlement Testing Script
set -e
echo "========================================="
echo "AS4 Settlement Testing Script"
echo "========================================="
cd "$(dirname "$0")/.."

# Step 1: TypeScript Compilation
echo ""
echo "Step 1: TypeScript Compilation Test..."
if npx tsc --noEmit; then
    echo "✓ TypeScript compilation successful"
else
    echo "✗ TypeScript compilation failed"
    exit 1
fi

# Step 2: Linter (non-blocking)
echo ""
echo "Step 2: Linter Check..."
npm run lint || echo "⚠ Linter issues found (non-blocking)"

# Step 3: Unit Tests (if database available)
echo ""
echo "Step 3: Running Integration Tests..."
if npm test -- as4-settlement.test.ts 2>&1; then
    echo "✓ Tests passed"
else
    echo "⚠ Tests failed or database not available"
    echo "  Run tests manually when database is available:"
    echo "  npm test -- as4-settlement.test.ts"
fi

# Step 4: Verify Service Imports
echo ""
echo "Step 4: Verifying Service Imports..."
# BUG FIX: use fs.existsSync instead of require.resolve. Node's resolver does
# not pick up .ts sources, so the previous require.resolve() check reported
# every service file as missing even when it existed.
node -e "
const fs = require('fs');
const services = [
  'as4-security.service',
  'as4-msh.service',
  'as4-gateway.service',
  'member-directory.service',
  'instruction-intake.service',
  'posting-engine.service'
];
services.forEach(s => {
  const candidates = [
    'src/core/settlement/as4/' + s.replace('as4-', 'as4/') + '.ts',
    'src/core/settlement/as4-settlement/' + s + '.ts'
  ];
  if (candidates.some(p => fs.existsSync(p))) {
    console.log('✓', s);
  } else {
    console.log('✗', s, '- not found');
  }
});
" 2>&1 || echo "⚠ Service import check completed"

# Step 5: API Route Verification
echo ""
echo "Step 5: API Route Verification..."
if grep -q "/api/v1/as4/gateway" src/integration/api-gateway/app.ts && \
   grep -q "/api/v1/as4/directory" src/integration/api-gateway/app.ts && \
   grep -q "/api/v1/as4/settlement" src/integration/api-gateway/app.ts; then
    echo "✓ All AS4 routes registered"
else
    echo "✗ Some AS4 routes missing"
    exit 1
fi

# Step 6: Database Schema Verification
echo ""
echo "Step 6: Database Schema Verification..."
if grep -q "model As4Member" prisma/schema.prisma && \
   grep -q "model As4SettlementInstruction" prisma/schema.prisma; then
    echo "✓ Database models defined"
else
    echo "✗ Database models missing"
    exit 1
fi

echo ""
echo "========================================="
echo "Testing complete!"
echo "========================================="
echo ""
167
scripts/verify-as4-setup.sh Executable file
View File

@@ -0,0 +1,167 @@
#!/bin/bash
# Verify AS4 Settlement Setup
# Checks all prerequisites and configuration
set -e
echo "========================================="
echo "AS4 Settlement Setup Verification"
echo "========================================="
cd "$(dirname "$0")/.."

ERRORS=0
WARNINGS=0
# BUG FIX: counters are bumped with plain assignment ('X=$((X + 1))').
# The previous '((ERRORS++))' form is fatal under 'set -e': post-incrementing
# from 0 evaluates to 0, the (( )) command exits with status 1, and the script
# aborts on the very first recorded warning or error.

# Check Node.js
echo ""
echo "1. Checking Node.js..."
if command -v node &> /dev/null; then
    NODE_VERSION=$(node --version)
    echo " ✓ Node.js installed: $NODE_VERSION"
    # Strip the leading 'v' and compare the major version only.
    if [[ $(echo "$NODE_VERSION" | cut -d'v' -f2 | cut -d'.' -f1) -lt 18 ]]; then
        echo " ⚠ Warning: Node.js 18+ recommended"
        WARNINGS=$((WARNINGS + 1))
    fi
else
    echo " ✗ Node.js not found"
    ERRORS=$((ERRORS + 1))
fi

# Check PostgreSQL
echo ""
echo "2. Checking PostgreSQL..."
if command -v psql &> /dev/null; then
    PSQL_VERSION=$(psql --version | head -1)
    echo " ✓ PostgreSQL installed: $PSQL_VERSION"
    # Test connection
    if [ -n "$DATABASE_URL" ]; then
        if psql "$DATABASE_URL" -c "SELECT 1" &> /dev/null; then
            echo " ✓ Database connection successful"
        else
            echo " ⚠ Warning: Database connection failed"
            WARNINGS=$((WARNINGS + 1))
        fi
    else
        echo " ⚠ Warning: DATABASE_URL not set"
        WARNINGS=$((WARNINGS + 1))
    fi
else
    echo " ✗ PostgreSQL not found"
    ERRORS=$((ERRORS + 1))
fi

# Check Redis
echo ""
echo "3. Checking Redis..."
if command -v redis-cli &> /dev/null; then
    REDIS_VERSION=$(redis-cli --version | head -1)
    echo " ✓ Redis installed: $REDIS_VERSION"
    # Test connection
    if redis-cli ping &> /dev/null; then
        echo " ✓ Redis connection successful"
    else
        echo " ⚠ Warning: Redis connection failed (may not be running)"
        WARNINGS=$((WARNINGS + 1))
    fi
else
    echo " ⚠ Warning: Redis not found (optional for development)"
    WARNINGS=$((WARNINGS + 1))
fi

# Check Prisma
echo ""
echo "4. Checking Prisma..."
if [ -f "node_modules/.bin/prisma" ]; then
    PRISMA_VERSION=$(npx prisma --version | head -1)
    echo " ✓ Prisma installed: $PRISMA_VERSION"
else
    echo " ✗ Prisma not found - run: npm install"
    ERRORS=$((ERRORS + 1))
fi

# Check certificates
echo ""
echo "5. Checking Certificates..."
if [ -f ".env" ]; then
    # NOTE(review): sourcing .env executes it as shell; values containing
    # spaces or special characters must be quoted in the file.
    source .env 2>/dev/null || true
    if [ -n "$AS4_TLS_CERT_PATH" ] && [ -f "$AS4_TLS_CERT_PATH" ]; then
        echo " ✓ TLS certificate found"
    else
        echo " ⚠ Warning: TLS certificate not found - run: ./scripts/generate-as4-certificates.sh"
        WARNINGS=$((WARNINGS + 1))
    fi
    if [ -n "$AS4_SIGNING_CERT_PATH" ] && [ -f "$AS4_SIGNING_CERT_PATH" ]; then
        echo " ✓ Signing certificate found"
    else
        echo " ⚠ Warning: Signing certificate not found"
        WARNINGS=$((WARNINGS + 1))
    fi
else
    echo " ⚠ Warning: .env file not found"
    WARNINGS=$((WARNINGS + 1))
fi

# Check database models
echo ""
echo "6. Checking Database Models..."
if grep -q "model As4Member" prisma/schema.prisma; then
    echo " ✓ AS4 models defined in schema"
else
    echo " ✗ AS4 models not found in schema"
    ERRORS=$((ERRORS + 1))
fi

# Check routes
echo ""
echo "7. Checking Route Registration..."
if grep -q "as4GatewayRoutes" src/integration/api-gateway/app.ts; then
    echo " ✓ AS4 routes registered in app.ts"
else
    echo " ✗ AS4 routes not registered"
    ERRORS=$((ERRORS + 1))
fi

# Check migration file
echo ""
echo "8. Checking Migration File..."
if [ -f "prisma/migrations/20260119000000_add_as4_settlement_models/migration.sql" ]; then
    echo " ✓ Migration file exists"
else
    echo " ⚠ Warning: Migration file not found"
    WARNINGS=$((WARNINGS + 1))
fi

# Check seed script
echo ""
echo "9. Checking Seed Script..."
if [ -f "scripts/seed-as4-settlement-marketplace-offering.ts" ]; then
    echo " ✓ Seed script exists"
else
    echo " ✗ Seed script not found"
    ERRORS=$((ERRORS + 1))
fi

# Summary
echo ""
echo "========================================="
echo "Verification Summary"
echo "========================================="
echo "Errors: $ERRORS"
echo "Warnings: $WARNINGS"
echo ""
if [ $ERRORS -eq 0 ]; then
    echo "✓ Setup verification passed!"
    if [ $WARNINGS -gt 0 ]; then
        echo "⚠ Some warnings found (non-blocking)"
    fi
    exit 0
else
    echo "✗ Setup verification failed - fix errors above"
    exit 1
fi

View File

@@ -0,0 +1,36 @@
-- Verify Database Column Names
-- Run this to check if your database uses snake_case or camelCase
-- This is CRITICAL before running migrations
--
-- All lookups filter on table_schema = current_schema() so a same-named table
-- in another schema (e.g. an extension or a second tenant schema) cannot
-- produce false positives. (current_schema() is PostgreSQL.)

-- Check ledger_entries columns
SELECT
    column_name,
    data_type,
    is_nullable
FROM information_schema.columns
WHERE table_schema = current_schema()
  AND table_name = 'ledger_entries'
  AND column_name IN ('ledger_id', 'ledgerId', 'reference_id', 'referenceId',
                      'debit_account_id', 'debitAccountId', 'credit_account_id', 'creditAccountId')
ORDER BY column_name;

-- Check bank_accounts columns
SELECT
    column_name,
    data_type,
    is_nullable
FROM information_schema.columns
WHERE table_schema = current_schema()
  AND table_name = 'bank_accounts'
  AND column_name IN ('available_balance', 'availableBalance',
                      'reserved_balance', 'reservedBalance',
                      'currency_code', 'currencyCode')
ORDER BY column_name;

-- Summary: report which naming convention the live schema uses.
SELECT
    CASE
        WHEN EXISTS (SELECT 1 FROM information_schema.columns
                     WHERE table_schema = current_schema()
                       AND table_name = 'ledger_entries'
                       AND column_name = 'ledger_id')
            THEN 'Database uses snake_case (ledger_id)'
        WHEN EXISTS (SELECT 1 FROM information_schema.columns
                     WHERE table_schema = current_schema()
                       AND table_name = 'ledger_entries'
                       AND column_name = 'ledgerId')
            THEN 'Database uses camelCase (ledgerId)'
        ELSE 'Cannot determine - table may not exist'
    END AS column_naming_convention;