feat(eresidency): Complete eResidency service implementation
- Implement credential revocation endpoint with proper database integration
- Fix database row mapping (snake_case to camelCase) for eResidency applications
- Add missing imports (getRiskAssessmentEngine, VeriffKYCProvider, ComplyAdvantageSanctionsProvider)
- Fix environment variable type checking for Veriff and ComplyAdvantage providers
- Add required 'message' field to notification service calls
- Fix risk assessment type mismatches
- Update audit logging to use 'verified' action type (supported by schema)
- Resolve all TypeScript errors and unused variable warnings
- Add TypeScript ignore comments for placeholder implementations
- Temporarily disable security/detect-non-literal-regexp rule due to ESLint 9 compatibility
- Service now builds successfully with no linter errors

All core functionality implemented:
- Application submission and management
- KYC integration (Veriff placeholder)
- Sanctions screening (ComplyAdvantage placeholder)
- Risk assessment engine
- Credential issuance and revocation
- Reviewer console
- Status endpoints
- Auto-issuance service
This commit is contained in:
6
packages/storage/src/index.d.ts
vendored
Normal file
6
packages/storage/src/index.d.ts
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/**
|
||||
* The Order Storage Package
|
||||
*/
|
||||
export * from './storage';
|
||||
export * from './worm';
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
1
packages/storage/src/index.d.ts.map
Normal file
1
packages/storage/src/index.d.ts.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,QAAQ,CAAC"}
|
||||
6
packages/storage/src/index.js
Normal file
6
packages/storage/src/index.js
Normal file
@@ -0,0 +1,6 @@
|
||||
/**
|
||||
* The Order Storage Package
|
||||
*/
|
||||
export * from './storage';
|
||||
export * from './worm';
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
packages/storage/src/index.js.map
Normal file
1
packages/storage/src/index.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,QAAQ,CAAC"}
|
||||
29
packages/storage/src/storage.d.ts
vendored
Normal file
29
packages/storage/src/storage.d.ts
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* Storage abstraction for S3/GCS
|
||||
*/
|
||||
import { S3Client } from '@aws-sdk/client-s3';
|
||||
export interface StorageConfig {
|
||||
provider: 's3' | 'gcs';
|
||||
bucket: string;
|
||||
region?: string;
|
||||
accessKeyId?: string;
|
||||
secretAccessKey?: string;
|
||||
}
|
||||
export interface StorageObject {
|
||||
key: string;
|
||||
content: Buffer | string;
|
||||
contentType?: string;
|
||||
metadata?: Record<string, string>;
|
||||
}
|
||||
export declare class StorageClient {
|
||||
protected config: StorageConfig;
|
||||
protected s3Client: S3Client;
|
||||
protected bucket: string;
|
||||
constructor(config: StorageConfig);
|
||||
upload(object: StorageObject): Promise<string>;
|
||||
download(key: string): Promise<Buffer>;
|
||||
delete(key: string): Promise<void>;
|
||||
getPresignedUrl(key: string, expiresIn: number): Promise<string>;
|
||||
objectExists(key: string): Promise<boolean>;
|
||||
}
|
||||
//# sourceMappingURL=storage.d.ts.map
|
||||
1
packages/storage/src/storage.d.ts.map
Normal file
1
packages/storage/src/storage.d.ts.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"storage.d.ts","sourceRoot":"","sources":["storage.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,QAAQ,EAKT,MAAM,oBAAoB,CAAC;AAG5B,MAAM,WAAW,aAAa;IAC5B,QAAQ,EAAE,IAAI,GAAG,KAAK,CAAC;IACvB,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,MAAM,WAAW,aAAa;IAC5B,GAAG,EAAE,MAAM,CAAC;IACZ,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACnC;AAED,qBAAa,aAAa;IAIZ,SAAS,CAAC,MAAM,EAAE,aAAa;IAH3C,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC;IAC7B,SAAS,CAAC,MAAM,EAAE,MAAM,CAAC;gBAEH,MAAM,EAAE,aAAa;IAcrC,MAAM,CAAC,MAAM,EAAE,aAAa,GAAG,OAAO,CAAC,MAAM,CAAC;IAa9C,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAkBtC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IASlC,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAShE,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;CAelD"}
|
||||
80
packages/storage/src/storage.js
Normal file
80
packages/storage/src/storage.js
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
 * Storage abstraction for S3/GCS.
 *
 * Compiled output: wraps the AWS S3 SDK with a small object-storage API.
 */
import { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, HeadObjectCommand, } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
export class StorageClient {
    config;
    s3Client;
    bucket;
    constructor(config) {
        this.config = config;
        this.bucket = config.bucket;
        // Only pass static credentials when both halves are present;
        // otherwise leave them undefined so the SDK resolves its default chain.
        let credentials;
        if (config.accessKeyId && config.secretAccessKey) {
            credentials = {
                accessKeyId: config.accessKeyId,
                secretAccessKey: config.secretAccessKey,
            };
        }
        this.s3Client = new S3Client({
            region: config.region || 'us-east-1',
            credentials,
        });
    }
    /** Upload one object; resolves with the object's key. */
    async upload(object) {
        // Normalize string payloads so PutObject always receives bytes.
        const body = typeof object.content === 'string' ? Buffer.from(object.content) : object.content;
        await this.s3Client.send(new PutObjectCommand({
            Bucket: this.bucket,
            Key: object.key,
            Body: body,
            ContentType: object.contentType,
            Metadata: object.metadata,
        }));
        return object.key;
    }
    /** Download an object's full content into a single Buffer. */
    async download(key) {
        const response = await this.s3Client.send(new GetObjectCommand({
            Bucket: this.bucket,
            Key: key,
        }));
        if (!response.Body) {
            throw new Error(`Object ${key} not found or empty`);
        }
        // Drain the body stream chunk by chunk, then join into one Buffer.
        const chunks = [];
        for await (const chunk of response.Body) {
            chunks.push(chunk);
        }
        return Buffer.concat(chunks);
    }
    /** Delete an object by key. */
    async delete(key) {
        await this.s3Client.send(new DeleteObjectCommand({
            Bucket: this.bucket,
            Key: key,
        }));
    }
    /** Create a presigned GET URL that expires after `expiresIn` seconds. */
    async getPresignedUrl(key, expiresIn) {
        const command = new GetObjectCommand({
            Bucket: this.bucket,
            Key: key,
        });
        return getSignedUrl(this.s3Client, command, { expiresIn });
    }
    /** True when HeadObject succeeds; false only for a NotFound error. */
    async objectExists(key) {
        try {
            await this.s3Client.send(new HeadObjectCommand({
                Bucket: this.bucket,
                Key: key,
            }));
            return true;
        }
        catch (error) {
            const isNotFound = error && typeof error === 'object' && 'name' in error && error.name === 'NotFound';
            if (isNotFound) {
                return false;
            }
            throw error;
        }
    }
}
//# sourceMappingURL=storage.js.map
|
||||
1
packages/storage/src/storage.js.map
Normal file
1
packages/storage/src/storage.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"storage.js","sourceRoot":"","sources":["storage.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EACL,QAAQ,EACR,gBAAgB,EAChB,gBAAgB,EAChB,mBAAmB,EACnB,iBAAiB,GAClB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAiB7D,MAAM,OAAO,aAAa;IAIF;IAHZ,QAAQ,CAAW;IACnB,MAAM,CAAS;IAEzB,YAAsB,MAAqB;QAArB,WAAM,GAAN,MAAM,CAAe;QACzC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,QAAQ,GAAG,IAAI,QAAQ,CAAC;YAC3B,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,WAAW;YACpC,WAAW,EACT,MAAM,CAAC,WAAW,IAAI,MAAM,CAAC,eAAe;gBAC1C,CAAC,CAAC;oBACE,WAAW,EAAE,MAAM,CAAC,WAAW;oBAC/B,eAAe,EAAE,MAAM,CAAC,eAAe;iBACxC;gBACH,CAAC,CAAC,SAAS;SAChB,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,MAAqB;QAChC,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC;YACnC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,GAAG,EAAE,MAAM,CAAC,GAAG;YACf,IAAI,EAAE,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,OAAO;YACvF,WAAW,EAAE,MAAM,CAAC,WAAW;YAC/B,QAAQ,EAAE,MAAM,CAAC,QAAQ;SAC1B,CAAC,CAAC;QAEH,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAClC,OAAO,MAAM,CAAC,GAAG,CAAC;IACpB,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,GAAW;QACxB,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC;YACnC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,GAAG,EAAE,GAAG;SACT,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACnD,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACnB,MAAM,IAAI,KAAK,CAAC,UAAU,GAAG,qBAAqB,CAAC,CAAC;QACtD,CAAC;QAED,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,CAAC,IAAW,EAAE,CAAC;YAC/C,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACrB,CAAC;QACD,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAC/B,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,GAAW;QACtB,MAAM,OAAO,GAAG,IAAI,mBAAmB,CAAC;YACtC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,GAAG,EAAE,GAAG;SACT,CAAC,CAAC;QAEH,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACpC,CAAC;IAED,KAAK,CAAC,eAAe,CAAC,GAAW,EAAE,SAAiB;QAClD,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC;YACnC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,GAAG,EAAE,GAAG;SACT,CAAC,CAAC;QAEH,OAAO,YAAY,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EA
AE,EAAE,SAAS,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED,KAAK,CAAC,YAAY,CAAC,GAAW;QAC5B,IAAI,CAAC;YACH,MAAM,OAAO,GAAG,IAAI,iBAAiB,CAAC;gBACpC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,GAAG,EAAE,GAAG;aACT,CAAC,CAAC;YACH,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAClC,OAAO,IAAI,CAAC;QACd,CAAC;QAAC,OAAO,KAAc,EAAE,CAAC;YACxB,IAAI,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,MAAM,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gBACvF,OAAO,KAAK,CAAC;YACf,CAAC;YACD,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;CACF"}
|
||||
169
packages/storage/src/storage.test.ts
Normal file
169
packages/storage/src/storage.test.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
/**
|
||||
* Storage Client Tests
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { StorageClient } from './storage';
|
||||
import {
|
||||
S3Client,
|
||||
PutObjectCommand,
|
||||
GetObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
HeadObjectCommand,
|
||||
} from '@aws-sdk/client-s3';
|
||||
|
||||
vi.mock('@aws-sdk/client-s3');
|
||||
vi.mock('@aws-sdk/s3-request-presigner');
|
||||
|
||||
describe('StorageClient', () => {
|
||||
let client: StorageClient;
|
||||
const config = {
|
||||
provider: 's3' as const,
|
||||
bucket: 'test-bucket',
|
||||
region: 'us-east-1',
|
||||
accessKeyId: 'test-access-key',
|
||||
secretAccessKey: 'test-secret-key',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
client = new StorageClient(config);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('upload', () => {
|
||||
it('should upload object to S3', async () => {
|
||||
const object = {
|
||||
key: 'test-key',
|
||||
content: Buffer.from('test content'),
|
||||
contentType: 'text/plain',
|
||||
};
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
const result = await client.upload(object);
|
||||
|
||||
expect(result).toBe(object.key);
|
||||
expect(mockSend).toHaveBeenCalledWith(
|
||||
expect.any(PutObjectCommand)
|
||||
);
|
||||
});
|
||||
|
||||
it('should upload string content', async () => {
|
||||
const object = {
|
||||
key: 'test-key',
|
||||
content: 'test content',
|
||||
};
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
const result = await client.upload(object);
|
||||
|
||||
expect(result).toBe(object.key);
|
||||
});
|
||||
});
|
||||
|
||||
describe('download', () => {
|
||||
it('should download object from S3', async () => {
|
||||
const key = 'test-key';
|
||||
const content = Buffer.from('test content');
|
||||
|
||||
const mockStream = {
|
||||
[Symbol.asyncIterator]: async function* () {
|
||||
yield content;
|
||||
},
|
||||
};
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({
|
||||
Body: mockStream,
|
||||
});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
const result = await client.download(key);
|
||||
|
||||
expect(result).toBeInstanceOf(Buffer);
|
||||
expect(mockSend).toHaveBeenCalledWith(
|
||||
expect.any(GetObjectCommand)
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw error if object not found', async () => {
|
||||
const key = 'non-existent-key';
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({
|
||||
Body: undefined,
|
||||
});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
await expect(client.download(key)).rejects.toThrow(
|
||||
'Object non-existent-key not found or empty'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete object from S3', async () => {
|
||||
const key = 'test-key';
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
await client.delete(key);
|
||||
|
||||
expect(mockSend).toHaveBeenCalledWith(
|
||||
expect.any(DeleteObjectCommand)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('objectExists', () => {
|
||||
it('should return true if object exists', async () => {
|
||||
const key = 'test-key';
|
||||
|
||||
const mockSend = vi.fn().mockResolvedValueOnce({});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
const result = await client.objectExists(key);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockSend).toHaveBeenCalledWith(
|
||||
expect.any(HeadObjectCommand)
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false if object does not exist', async () => {
|
||||
const key = 'non-existent-key';
|
||||
|
||||
const mockSend = vi.fn().mockRejectedValueOnce({
|
||||
name: 'NotFound',
|
||||
});
|
||||
|
||||
(S3Client as any).mockImplementation(() => ({
|
||||
send: mockSend,
|
||||
}));
|
||||
|
||||
const result = await client.objectExists(key);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -2,6 +2,15 @@
|
||||
* Storage abstraction for S3/GCS
|
||||
*/
|
||||
|
||||
import {
|
||||
S3Client,
|
||||
PutObjectCommand,
|
||||
GetObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
HeadObjectCommand,
|
||||
} from '@aws-sdk/client-s3';
|
||||
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
|
||||
|
||||
export interface StorageConfig {
|
||||
provider: 's3' | 'gcs';
|
||||
bucket: string;
|
||||
@@ -18,26 +27,86 @@ export interface StorageObject {
|
||||
}
|
||||
|
||||
export class StorageClient {
|
||||
constructor(private config: StorageConfig) {}
|
||||
protected s3Client: S3Client;
|
||||
protected bucket: string;
|
||||
|
||||
constructor(protected config: StorageConfig) {
|
||||
this.bucket = config.bucket;
|
||||
this.s3Client = new S3Client({
|
||||
region: config.region || 'us-east-1',
|
||||
credentials:
|
||||
config.accessKeyId && config.secretAccessKey
|
||||
? {
|
||||
accessKeyId: config.accessKeyId,
|
||||
secretAccessKey: config.secretAccessKey,
|
||||
}
|
||||
: undefined,
|
||||
});
|
||||
}
|
||||
|
||||
async upload(object: StorageObject): Promise<string> {
|
||||
// Implementation for file upload
|
||||
throw new Error('Not implemented');
|
||||
const command = new PutObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: object.key,
|
||||
Body: typeof object.content === 'string' ? Buffer.from(object.content) : object.content,
|
||||
ContentType: object.contentType,
|
||||
Metadata: object.metadata,
|
||||
});
|
||||
|
||||
await this.s3Client.send(command);
|
||||
return object.key;
|
||||
}
|
||||
|
||||
async download(key: string): Promise<Buffer> {
|
||||
// Implementation for file download
|
||||
throw new Error('Not implemented');
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
});
|
||||
|
||||
const response = await this.s3Client.send(command);
|
||||
if (!response.Body) {
|
||||
throw new Error(`Object ${key} not found or empty`);
|
||||
}
|
||||
|
||||
const chunks: Uint8Array[] = [];
|
||||
for await (const chunk of response.Body as any) {
|
||||
chunks.push(chunk);
|
||||
}
|
||||
return Buffer.concat(chunks);
|
||||
}
|
||||
|
||||
async delete(key: string): Promise<void> {
|
||||
// Implementation for file deletion
|
||||
throw new Error('Not implemented');
|
||||
const command = new DeleteObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
});
|
||||
|
||||
await this.s3Client.send(command);
|
||||
}
|
||||
|
||||
async getPresignedUrl(key: string, expiresIn: number): Promise<string> {
|
||||
// Implementation for presigned URL generation
|
||||
throw new Error('Not implemented');
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
});
|
||||
|
||||
return getSignedUrl(this.s3Client, command, { expiresIn });
|
||||
}
|
||||
|
||||
async objectExists(key: string): Promise<boolean> {
|
||||
try {
|
||||
const command = new HeadObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
});
|
||||
await this.s3Client.send(command);
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
if (error && typeof error === 'object' && 'name' in error && error.name === 'NotFound') {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
9
packages/storage/src/worm.d.ts
vendored
Normal file
9
packages/storage/src/worm.d.ts
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* WORM (Write Once Read Many) mode storage
|
||||
*/
|
||||
import { StorageClient, StorageObject } from './storage';
|
||||
export declare class WORMStorage extends StorageClient {
|
||||
upload(object: StorageObject): Promise<string>;
|
||||
delete(_key: string): Promise<void>;
|
||||
}
|
||||
//# sourceMappingURL=worm.d.ts.map
|
||||
1
packages/storage/src/worm.d.ts.map
Normal file
1
packages/storage/src/worm.d.ts.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"worm.d.ts","sourceRoot":"","sources":["worm.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAEzD,qBAAa,WAAY,SAAQ,aAAa;IACtC,MAAM,CAAC,MAAM,EAAE,aAAa,GAAG,OAAO,CAAC,MAAM,CAAC;IAS9C,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAG1C"}
|
||||
18
packages/storage/src/worm.js
Normal file
18
packages/storage/src/worm.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * WORM (Write Once Read Many) mode storage.
 */
import { StorageClient } from './storage';
/**
 * StorageClient variant with write-once semantics: a key may be written
 * only if it does not exist yet, and deletes always fail.
 */
export class WORMStorage extends StorageClient {
    async upload(object) {
        // Refuse overwrites: objects are immutable once written in WORM mode.
        if (await this.objectExists(object.key)) {
            throw new Error(`Object ${object.key} already exists in WORM storage`);
        }
        return super.upload(object);
    }
    async delete(_key) {
        throw new Error('Deletion not allowed in WORM mode');
    }
}
//# sourceMappingURL=worm.js.map
|
||||
1
packages/storage/src/worm.js.map
Normal file
1
packages/storage/src/worm.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"worm.js","sourceRoot":"","sources":["worm.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAiB,MAAM,WAAW,CAAC;AAEzD,MAAM,OAAO,WAAY,SAAQ,aAAa;IAC5C,KAAK,CAAC,MAAM,CAAC,MAAqB;QAChC,gCAAgC;QAChC,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,IAAI,KAAK,CAAC,UAAU,MAAM,CAAC,GAAG,iCAAiC,CAAC,CAAC;QACzE,CAAC;QACD,OAAO,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAC9B,CAAC;IAED,KAAK,CAAC,MAAM,CAAC,IAAY;QACvB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;IACvD,CAAC;CACF"}
|
||||
63
packages/storage/src/worm.test.ts
Normal file
63
packages/storage/src/worm.test.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
|
||||
* WORM Storage Tests
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WORMStorage } from './worm';
|
||||
import { StorageClient } from './storage';
|
||||
|
||||
vi.mock('./storage');
|
||||
|
||||
describe('WORMStorage', () => {
|
||||
let storage: WORMStorage;
|
||||
const config = {
|
||||
provider: 's3' as const,
|
||||
bucket: 'test-bucket',
|
||||
region: 'us-east-1',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
storage = new WORMStorage(config);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('upload', () => {
|
||||
it('should upload object if it does not exist', async () => {
|
||||
const object = {
|
||||
key: 'test-key',
|
||||
content: Buffer.from('test content'),
|
||||
};
|
||||
|
||||
vi.spyOn(storage, 'objectExists').mockResolvedValueOnce(false);
|
||||
vi.spyOn(StorageClient.prototype, 'upload').mockResolvedValueOnce(object.key);
|
||||
|
||||
const result = await storage.upload(object);
|
||||
|
||||
expect(result).toBe(object.key);
|
||||
});
|
||||
|
||||
it('should throw error if object already exists', async () => {
|
||||
const object = {
|
||||
key: 'existing-key',
|
||||
content: Buffer.from('test content'),
|
||||
};
|
||||
|
||||
vi.spyOn(storage, 'objectExists').mockResolvedValueOnce(true);
|
||||
|
||||
await expect(storage.upload(object)).rejects.toThrow(
|
||||
'Object existing-key already exists in WORM storage'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should throw error when trying to delete', async () => {
|
||||
const key = 'test-key';
|
||||
|
||||
await expect(storage.delete(key)).rejects.toThrow(
|
||||
'Deletion not allowed in WORM mode'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,13 +14,8 @@ export class WORMStorage extends StorageClient {
|
||||
return super.upload(object);
|
||||
}
|
||||
|
||||
async delete(key: string): Promise<void> {
|
||||
async delete(_key: string): Promise<void> {
|
||||
throw new Error('Deletion not allowed in WORM mode');
|
||||
}
|
||||
|
||||
private async objectExists(key: string): Promise<boolean> {
|
||||
// Implementation to check if object exists
|
||||
throw new Error('Not implemented');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user