fix: store blockhash alongside blocks (AztecProtocol#3950)
Small PR to store a block's hash in the archiver. This should speed up
small operations like getting a transaction from a block, since the
block's logs no longer have to be deserialized and attached to it.

Related AztecProtocol#3870
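
For context, here is a minimal sketch (not part of this commit) of the round trip this change enables: the hash is persisted next to the serialized block on write and handed back to L2Block.fromBuffer on read, so fetching a single transaction no longer requires deserializing the block's logs. The import path, the in-memory Map, and the Buffer-typed fields are assumptions for illustration; the real store keeps Uint8Arrays in LMDB tables and converts them with an asBuffer helper.

import { L2Block } from '@aztec/circuit-types'; // assumed package name for yarn-project/circuit-types

type BlockContext = {
  block?: Buffer;
  blockHash?: Buffer; // new: the hash persisted next to the serialized block
};

// Stand-in for the LMDB `blocks` table used by the real archiver store.
const blocks = new Map<number, BlockContext>();

// Write path: store the hash together with the block so later reads can reuse it.
function storeBlock(block: L2Block): void {
  blocks.set(block.number, {
    block: block.toBuffer(),
    blockHash: block.getBlockHash(), // assumed to return a Buffer here
  });
}

// Read path: rehydrate the block with its cached hash and fetch a tx without touching any logs.
function getTx(blockNumber: number, txIndex: number) {
  const ctx = blocks.get(blockNumber);
  if (!ctx?.block) {
    return undefined;
  }
  const block = L2Block.fromBuffer(ctx.block, ctx.blockHash);
  return block.getTx(txIndex);
}
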
alexghr authored Jan 12, 2024
1 parent ae81473 commit 72fe71a
Showing 2 changed files with 46 additions and 39 deletions.
57 changes: 30 additions & 27 deletions yarn-project/archiver/src/archiver/lmdb_archiver_store.ts
@@ -46,6 +46,7 @@ type BlockIndexValue = [blockNumber: number, index: number];

type BlockContext = {
block?: Uint8Array;
blockHash?: Uint8Array;
l1BlockNumber?: Uint8Array;
encryptedLogs?: Uint8Array;
unencryptedLogs?: Uint8Array;
@@ -125,6 +126,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
const blockCtx = this.#tables.blocks.get(block.number) ?? {};
blockCtx.block = block.toBuffer();
blockCtx.l1BlockNumber = toBufferBE(block.getL1BlockNumber(), 32);
blockCtx.blockHash = block.getBlockHash();

// no need to await, all writes are enqueued in the transaction
// awaiting would interrupt the execution flow of this callback and "leak" the transaction to some other part
@@ -163,15 +165,10 @@ export class LMDBArchiverStore implements ArchiverDataStore {
.getRange(this.#computeBlockRange(start, limit))
.filter(({ value }) => value.block)
.map(({ value }) => {
const block = L2Block.fromBuffer(asBuffer(value.block!));
if (value.encryptedLogs) {
block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.encryptedLogs)), LogType.ENCRYPTED);
}

if (value.unencryptedLogs) {
block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.unencryptedLogs)), LogType.UNENCRYPTED);
}

const block = L2Block.fromBuffer(
asBuffer(value.block!),
value.blockHash ? asBuffer(value.blockHash) : undefined,
);
return block;
}).asArray;

@@ -193,7 +190,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return Promise.resolve(undefined);
}

const block = this.#getBlock(blockNumber, true);
const block = this.#getBlock(blockNumber);
return Promise.resolve(block?.getTx(txIndex));
}

@@ -415,18 +412,18 @@ export class LMDBArchiverStore implements ArchiverDataStore {
getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
try {
if (filter.afterLog) {
return Promise.resolve(this.#filterLogsBetweenBlocks(filter));
return Promise.resolve(this.#filterUnencryptedLogsBetweenBlocks(filter));
} else if (filter.txHash) {
return Promise.resolve(this.#filterLogsOfTx(filter));
return Promise.resolve(this.#filterUnencryptedLogsOfTx(filter));
} else {
return Promise.resolve(this.#filterLogsBetweenBlocks(filter));
return Promise.resolve(this.#filterUnencryptedLogsBetweenBlocks(filter));
}
} catch (err) {
return Promise.reject(err);
}
}

#filterLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
#filterUnencryptedLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
if (!filter.txHash) {
throw new Error('Missing txHash');
}
@@ -436,19 +433,16 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return { logs: [], maxLogsHit: false };
}

const block = this.#getBlock(blockNumber, true);
if (!block || !block.newUnencryptedLogs) {
return { logs: [], maxLogsHit: false };
}
const unencryptedLogsInBlock = this.#getBlockLogs(blockNumber, LogType.UNENCRYPTED);
const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));

const txLogs = block.newUnencryptedLogs.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));
const logs: ExtendedUnencryptedL2Log[] = [];
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);

return { logs, maxLogsHit };
}

#filterLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
#filterUnencryptedLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
const start =
filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
const end = filter.toBlock;
@@ -466,12 +460,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
let maxLogsHit = false;

loopOverBlocks: for (const blockNumber of blockNumbers) {
const block = this.#getBlock(blockNumber, true);
if (!block || !block.newUnencryptedLogs) {
continue;
}

const unencryptedLogsInBlock = block.newUnencryptedLogs;
const unencryptedLogsInBlock = this.#getBlockLogs(blockNumber, LogType.UNENCRYPTED);
for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < unencryptedLogsInBlock.txLogs.length; txIndex++) {
const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));
maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
@@ -632,7 +621,10 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return undefined;
}

const block = L2Block.fromBuffer(asBuffer(blockCtx.block));
const block = L2Block.fromBuffer(
asBuffer(blockCtx.block),
blockCtx.blockHash ? asBuffer(blockCtx.blockHash) : undefined,
);

if (withLogs) {
if (blockCtx.encryptedLogs) {
@@ -647,6 +639,17 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return block;
}

#getBlockLogs(blockNumber: number, logType: LogType): L2BlockL2Logs {
const blockCtx = this.#tables.blocks.get(blockNumber);
const logs = blockCtx?.[logType === LogType.ENCRYPTED ? 'encryptedLogs' : 'unencryptedLogs'];

if (!logs) {
return new L2BlockL2Logs([]);
}

return L2BlockL2Logs.fromBuffer(asBuffer(logs));
}

#computeBlockRange(start: number, limit: number): Required<Pick<RangeOptions, 'start' | 'end'>> {
if (limit < 1) {
throw new Error(`Invalid limit: ${limit}`);
28 changes: 16 additions & 12 deletions yarn-project/circuit-types/src/l2_block.ts
@@ -321,9 +321,10 @@ export class L2Block {
/**
* Deserializes L2 block without logs from a buffer.
* @param buf - A serialized L2 block.
* @param blockHash - The hash of the block.
* @returns Deserialized L2 block.
*/
static fromBuffer(buf: Buffer | BufferReader) {
static fromBuffer(buf: Buffer | BufferReader, blockHash?: Buffer) {
const reader = BufferReader.asReader(buf);
const globalVariables = reader.readObject(GlobalVariables);
// TODO(#3938): update the encoding here
@@ -358,17 +359,20 @@ export class L2Block {
// TODO(#3938): populate bodyHash
const header = new Header(startArchiveSnapshot, [Fr.ZERO, Fr.ZERO], state, globalVariables);

return L2Block.fromFields({
archive: endArchiveSnapshot,
header,
newCommitments,
newNullifiers,
newPublicDataWrites,
newL2ToL1Msgs,
newContracts,
newContractData,
newL1ToL2Messages,
});
return L2Block.fromFields(
{
archive: endArchiveSnapshot,
header,
newCommitments,
newNullifiers,
newPublicDataWrites,
newL2ToL1Msgs,
newContracts,
newContractData,
newL1ToL2Messages,
},
blockHash,
);
}

/**
