This repository has been archived by the owner on Jul 25, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
IPFS announcer: announces batches on ipfs as parquet file (#28)
* add announcements to payload * park some work * add parquet placeholders * some placeholders * add more logic for IPFS announcement * cleanup * add dependencies * remove helia * setup helia * breaking * comment out multiformats for now * set pinning note for announcer * cleanup * cleanup: todo, multiformats issue * use multiformats 0.9.9 * placeholders * set more placeholders * base 32 * cleanup * fill in the blanks * add a test * cleanup batchAnnouncer/extract dnspConverter * revert * cleanup * cleanup * address feedback * cleanup and fix tests * revert * use cache for schemas * rename
- Loading branch information
1 parent
cbfeece
commit b2db31c
Showing
16 changed files
with
700 additions
and
57 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,88 @@ | ||
import { expect, describe, jest, it, beforeEach } from '@jest/globals'; | ||
import assert from 'assert'; | ||
import { FrequencyParquetSchema } from '@dsnp/frequency-schemas/types/frequency'; | ||
import Redis from 'ioredis-mock'; | ||
import { BatchAnnouncer } from './batch.announcer'; | ||
|
||
// Create a mock for the dependencies | ||
const mockConfigService = { | ||
getIpfsCidPlaceholder: jest.fn(), | ||
}; | ||
|
||
const mockBlockchainService = { | ||
getSchema: jest.fn(), | ||
}; | ||
|
||
const mockIpfsService = { | ||
getPinned: jest.fn(), | ||
ipfsPin: jest.fn(), | ||
}; | ||
|
||
describe('BatchAnnouncer', () => { | ||
let ipfsAnnouncer: BatchAnnouncer; | ||
|
||
const broadcast: FrequencyParquetSchema = [ | ||
{ | ||
name: 'announcementType', | ||
column_type: { | ||
INTEGER: { | ||
bit_width: 32, | ||
sign: true, | ||
}, | ||
}, | ||
compression: 'GZIP', | ||
bloom_filter: false, | ||
}, | ||
{ | ||
name: 'contentHash', | ||
column_type: 'BYTE_ARRAY', | ||
compression: 'GZIP', | ||
bloom_filter: true, | ||
}, | ||
{ | ||
name: 'fromId', | ||
column_type: { | ||
INTEGER: { | ||
bit_width: 64, | ||
sign: false, | ||
}, | ||
}, | ||
compression: 'GZIP', | ||
bloom_filter: true, | ||
}, | ||
{ | ||
name: 'url', | ||
column_type: 'STRING', | ||
compression: 'GZIP', | ||
bloom_filter: false, | ||
}, | ||
]; | ||
const mockClient = new Redis(); | ||
|
||
beforeEach(async () => { | ||
ipfsAnnouncer = new BatchAnnouncer(mockClient, mockConfigService as any, mockBlockchainService as any, mockIpfsService as any); | ||
}); | ||
it('should be defined', () => { | ||
expect(ipfsAnnouncer).toBeDefined(); | ||
}); | ||
|
||
// Write your test cases here | ||
it('should announce a batch to IPFS', async () => { | ||
// Mock the necessary dependencies' behavior | ||
mockConfigService.getIpfsCidPlaceholder.mockReturnValue('mockIpfsUrl'); | ||
mockBlockchainService.getSchema.mockReturnValue({ model: JSON.stringify(broadcast) }); | ||
mockIpfsService.getPinned.mockReturnValue(Buffer.from('mockContentBuffer')); | ||
mockIpfsService.ipfsPin.mockReturnValue({ cid: 'mockCid', size: 'mockSize' }); | ||
|
||
const batchJob = { | ||
batchId: 'mockBatchId', | ||
schemaId: 123, | ||
announcements: [], | ||
}; | ||
|
||
const result = await ipfsAnnouncer.announce(batchJob); | ||
assert(result); | ||
expect(mockConfigService.getIpfsCidPlaceholder).toHaveBeenCalledWith('mockCid'); | ||
expect(mockBlockchainService.getSchema).toHaveBeenCalledWith(123); | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
import { Injectable, Logger } from '@nestjs/common'; | ||
import { PassThrough } from 'node:stream'; | ||
import { ParquetWriter } from '@dsnp/parquetjs'; | ||
import { fromFrequencySchema } from '@dsnp/frequency-schemas/parquet'; | ||
import { InjectRedis } from '@liaoliaots/nestjs-redis'; | ||
import Redis from 'ioredis'; | ||
import { PalletSchemasSchema } from '@polkadot/types/lookup'; | ||
import { BlockchainService } from '../blockchain/blockchain.service'; | ||
import { ConfigService } from '../../../api/src/config/config.service'; | ||
import { IBatchAnnouncerJobData } from '../interfaces/batch-announcer.job.interface'; | ||
import { IPublisherJob } from '../interfaces/publisher-job.interface'; | ||
import { IpfsService } from '../../../../libs/common/src/utils/ipfs.client'; | ||
|
||
@Injectable() | ||
export class BatchAnnouncer { | ||
private logger: Logger; | ||
|
||
constructor( | ||
@InjectRedis() private cacheManager: Redis, | ||
private configService: ConfigService, | ||
private blockchainService: BlockchainService, | ||
private ipfsService: IpfsService, | ||
) { | ||
this.logger = new Logger(BatchAnnouncer.name); | ||
} | ||
|
||
public async announce(batchJob: IBatchAnnouncerJobData): Promise<IPublisherJob> { | ||
this.logger.debug(`Announcing batch ${batchJob.batchId} on IPFS`); | ||
const { batchId, schemaId, announcements } = batchJob; | ||
|
||
let frequencySchema: PalletSchemasSchema; | ||
|
||
const schemaCacheKey = `schema:${schemaId}`; | ||
const cachedSchema = await this.cacheManager.get(schemaCacheKey); | ||
if (cachedSchema) { | ||
frequencySchema = JSON.parse(cachedSchema); | ||
} else { | ||
frequencySchema = await this.blockchainService.getSchema(schemaId); | ||
await this.cacheManager.set(schemaCacheKey, JSON.stringify(frequencySchema)); | ||
} | ||
|
||
const schema = JSON.parse(frequencySchema.model.toString()); | ||
if (!schema) { | ||
throw new Error(`Unable to parse schema for schemaId ${schemaId}`); | ||
} | ||
|
||
const [parquetSchema, writerOptions] = fromFrequencySchema(schema); | ||
const publishStream = new PassThrough(); | ||
|
||
const writer = await ParquetWriter.openStream(parquetSchema, publishStream as any, writerOptions); | ||
|
||
announcements.forEach(async (announcement) => { | ||
writer.appendRow(announcement); | ||
}); | ||
|
||
await writer.close(); | ||
const buffer = await this.bufferPublishStream(publishStream); | ||
const [cid, hash] = await this.pinStringToIPFS(buffer); | ||
const ipfsUrl = await this.formIpfsUrl(cid); | ||
this.logger.debug(`Batch ${batchId} published to IPFS at ${ipfsUrl}`); | ||
this.logger.debug(`Batch ${batchId} hash: ${hash}`); | ||
return { id: batchId, schemaId, data: { cid, payloadLength: buffer.length } }; | ||
} | ||
|
||
private async bufferPublishStream(publishStream: PassThrough): Promise<Buffer> { | ||
this.logger.debug('Buffering publish stream'); | ||
return new Promise((resolve, reject) => { | ||
const buffers: Buffer[] = []; | ||
publishStream.on('data', (data) => { | ||
buffers.push(data); | ||
}); | ||
publishStream.on('end', () => { | ||
resolve(Buffer.concat(buffers)); | ||
}); | ||
publishStream.on('error', (err) => { | ||
reject(err); | ||
}); | ||
}); | ||
} | ||
|
||
private async pinStringToIPFS(buf: Buffer): Promise<[string, string]> { | ||
const { cid, size } = await this.ipfsService.ipfsPin('application/octet-stream', buf); | ||
return [cid.toString(), size.toString()]; | ||
} | ||
|
||
private async formIpfsUrl(cid: string): Promise<string> { | ||
return this.configService.getIpfsCidPlaceholder(cid); | ||
} | ||
} |
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,7 @@ | ||
import { Announcement } from '../../../../libs/common/src/interfaces/dsnp'; | ||
|
||
/**
 * Job payload consumed by BatchAnnouncer.announce(): the announcements of one
 * batch together with the Frequency schema id they conform to.
 */
export interface IBatchAnnouncerJobData {
  // Unique identifier of the batch being announced.
  batchId: string;
  // On-chain Frequency schema id; its model is used to build the parquet schema.
  schemaId: number;
  // DSNP announcements to serialize into the batch's parquet file.
  announcements: Announcement[];
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.