refactor: updating block hash to be header.hash() (AztecProtocol#4286)
Fixes AztecProtocol#3941
+ renamed AppendOnlyTreeSnapshot.empty() to AppendOnlyTreeSnapshot.zero() to keep it consistent with Noir

**Note**: I did a large refactor of the `MerkleTrees` class because, with this change, a lot of the code there no longer made sense. Overall I think it's much cleaner now.
benesjan authored Jan 31, 2024
1 parent 7fa80db commit d4125e1
Showing 40 changed files with 281 additions and 638 deletions.
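
For context on the change described above, here is a minimal sketch of the before/after API shape. The classes and the hash function are stand-ins (not the real `@aztec/circuits.js` types, which hash in-circuit and return an `Fr`):

```typescript
import { createHash } from 'crypto';

// Placeholder hash; the real protocol uses its own hash, not sha256.
const hashBytes = (data: Buffer): Buffer => createHash('sha256').update(data).digest();

// Stand-in for the protocol Header.
class HeaderSketch {
  constructor(public readonly fields: Buffer) {}
  hash(): Buffer {
    return hashBytes(this.fields);
  }
}

class L2BlockSketch {
  constructor(public readonly header: HeaderSketch, public readonly body: Buffer) {}

  // Before: getBlockHash() hashed the whole serialized block (with logs) and the
  // result was cached / passed around as a Buffer.
  // After: the block hash is simply the hash of the block header.
  hash(): Buffer {
    return this.header.hash();
  }
}

const block = new L2BlockSketch(new HeaderSketch(Buffer.from('header fields')), Buffer.from('body'));
console.log(block.hash().toString('hex'));
```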
2 changes: 1 addition & 1 deletion l1-contracts/src/periphery/ContractDeploymentEmitter.sol
@@ -23,7 +23,7 @@ contract ContractDeploymentEmitter is IContractDeploymentEmitter {
* @param _saltedInitializationHash - Salted init hash
* @param _publicKeyHash - Public key hash
* @param _acir - The acir bytecode of the L2 contract
- * @dev See the link bellow for more info on partial address and public key:
+ * @dev See the link below for more info on partial address and public key:
* https://github.com/AztecProtocol/aztec-packages/blob/master/docs/docs/concepts/foundation/accounts/keys.md#addresses-partial-addresses-and-public-keys
* TODO: replace the link above with the link to deployed docs
*/
@@ -19,7 +19,7 @@ interface IContractDeploymentEmitter {
* @param saltedInitializationHash - Salted init hash
* @param publicKeyHash - Public key hash
* @param acir - The acir bytecode of the L2 contract
- * @dev See the link bellow for more info on partial address and public key:
+ * @dev See the link below for more info on partial address and public key:
* https://github.com/AztecProtocol/aztec-packages/blob/master/docs/docs/concepts/foundation/accounts/keys.md#addresses-partial-addresses-and-public-keys
* TODO: replace the link above with the link to deployed docs
*/
6 changes: 1 addition & 5 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -297,11 +297,7 @@ export class Archiver implements ArchiveSource {
retrievedBlocks.retrievedData.map(block => {
// Ensure we pad the L1 to L2 message array to the full size before storing.
block.newL1ToL2Messages = padArrayEnd(block.newL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
- return L2Block.fromFields(
-   omit(block, ['newEncryptedLogs', 'newUnencryptedLogs']),
-   block.getBlockHash(),
-   block.getL1BlockNumber(),
- );
+ return L2Block.fromFields(omit(block, ['newEncryptedLogs', 'newUnencryptedLogs']), block.getL1BlockNumber());
}),
);
}
@@ -9,7 +9,6 @@ type BlockContext = {
blockNumber: number;
l1BlockNumber: bigint;
block: Buffer;
- blockHash: Buffer;
};

/**
@@ -46,7 +45,6 @@ export class BlockStore {
blockNumber: block.number,
block: block.toBuffer(),
l1BlockNumber: block.getL1BlockNumber(),
- blockHash: block.getBlockHash(),
});

for (const [i, tx] of block.getTxs().entries()) {
@@ -77,7 +75,7 @@
*/
*getBlocks(start: number, limit: number): IterableIterator<L2Block> {
for (const blockCtx of this.#blocks.values(this.#computeBlockRange(start, limit))) {
- yield L2Block.fromBuffer(blockCtx.block, blockCtx.blockHash);
+ yield L2Block.fromBuffer(blockCtx.block);
}
}

@@ -92,9 +90,7 @@
return undefined;
}

- const block = L2Block.fromBuffer(blockCtx.block, blockCtx.blockHash);
-
- return block;
+ return L2Block.fromBuffer(blockCtx.block);
}

/**
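The storage-side effect of the change, sketched with simplified stand-ins (not the real `BlockStore` / kv-store API): because the hash is derivable from the header, it no longer needs to be persisted or threaded through deserialization.

```typescript
// Simplified sketch; the real store uses an AztecKVStore-backed map, not a Map.
type BlockContextSketch = {
  blockNumber: number;
  l1BlockNumber: bigint;
  block: Buffer; // serialized L2 block (header + body); no stored blockHash anymore
};

class BlockStoreSketch {
  #blocks = new Map<number, BlockContextSketch>();

  add(blockNumber: number, l1BlockNumber: bigint, serialized: Buffer): void {
    this.#blocks.set(blockNumber, { blockNumber, l1BlockNumber, block: serialized });
  }

  // Previously the stored hash was passed as a second argument when deserializing;
  // now the serialized buffer alone is enough.
  getSerialized(blockNumber: number): Buffer | undefined {
    return this.#blocks.get(blockNumber)?.block;
  }
}
```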
13 changes: 3 additions & 10 deletions yarn-project/aztec-node/src/aztec-node/server.ts
@@ -26,7 +26,6 @@ import {
ARCHIVE_HEIGHT,
CONTRACT_TREE_HEIGHT,
Fr,
- GlobalVariables,
Header,
L1_TO_L2_MSG_TREE_HEIGHT,
NOTE_HASH_TREE_HEIGHT,
@@ -35,7 +34,7 @@
PUBLIC_DATA_TREE_HEIGHT,
PublicDataTreeLeafPreimage,
} from '@aztec/circuits.js';
- import { computeGlobalsHash, computePublicDataTreeLeafSlot } from '@aztec/circuits.js/abis';
+ import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/abis';
import { L1ContractAddresses, createEthereumChain } from '@aztec/ethereum';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { createDebugLogger } from '@aztec/foundation/log';
@@ -45,7 +44,6 @@ import {
GlobalVariableBuilder,
PublicProcessorFactory,
SequencerClient,
- buildInitialHeader,
getGlobalVariableBuilder,
} from '@aztec/sequencer-client';
import { SiblingPath } from '@aztec/types/membership';
@@ -537,7 +535,7 @@ export class AztecNodeService implements AztecNode {

// No block was not found so we build the initial header.
const committedDb = await this.#getWorldState('latest');
- return await buildInitialHeader(committedDb);
+ return await committedDb.buildInitialHeader();
}

/**
@@ -549,16 +547,11 @@
const blockNumber = (await this.blockSource.getBlockNumber()) + 1;
const newGlobalVariables = await this.globalVariableBuilder.buildGlobalVariables(new Fr(blockNumber));
const prevHeader = (await this.blockSource.getBlock(-1))?.header;
- const prevGlobalVariables = prevHeader?.globalVariables ?? GlobalVariables.empty();

// Instantiate merkle trees so uncommitted updates by this simulation are local to it.
// TODO we should be able to remove this after https://github.com/AztecProtocol/aztec-packages/issues/1869
// So simulation of public functions doesn't affect the merkle trees.
- const merkleTrees = new MerkleTrees(this.merkleTreesDb, this.log);
- const globalVariablesHash = computeGlobalsHash(prevGlobalVariables);
- await merkleTrees.init({
- globalVariablesHash,
- });
+ const merkleTrees = await MerkleTrees.new(this.merkleTreesDb, this.log);

const publicProcessorFactory = new PublicProcessorFactory(
merkleTrees.asLatest(),
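The `MerkleTrees` call-site change above follows the async-factory pattern. A minimal sketch, under the assumption that initialization no longer needs a `globalVariablesHash` (stand-in class, not the real world-state implementation):

```typescript
class MerkleTreesSketch {
  private constructor(private readonly db: Map<string, Buffer>) {}

  // Replaces the old `new MerkleTrees(db)` + `await trees.init({ globalVariablesHash })` pair
  // with a single factory that returns a fully initialised instance.
  static async new(db: Map<string, Buffer>): Promise<MerkleTreesSketch> {
    const trees = new MerkleTreesSketch(db);
    await trees.init();
    return trees;
  }

  private async init(): Promise<void> {
    // Set up empty trees / load roots from the db here.
  }
}

// Usage mirroring the new call site in server.ts:
// const merkleTrees = await MerkleTrees.new(this.merkleTreesDb, this.log);
```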
2 changes: 1 addition & 1 deletion yarn-project/aztec-nr/aztec/src/oracle/header.nr
@@ -40,7 +40,7 @@ pub fn get_header_at(block_number: u32, context: PrivateContext) -> Header {
let header = get_header_at_internal(block_number);

// 4) Compute the block hash from the block header
- let block_hash = header.block_hash();
+ let block_hash = header.hash();

// 5) Get the membership witness of the block in the archive
let witness = get_archive_membership_witness(last_archive_block_number, block_hash);
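A hypothetical TypeScript analogue of the Noir oracle flow above (the `getArchiveMembershipWitness` parameter is illustrative, not a real `@aztec` API): since the archive leaf for a block is now the hash of its header, proving a historical header reduces to a membership check of `header.hash()`.

```typescript
interface HeaderLike {
  hash(): Buffer;
}

// Steps 4) and 5) from the oracle above, with the witness oracle injected as a parameter.
async function getBlockMembershipWitness(
  header: HeaderLike,
  lastArchiveBlockNumber: number,
  getArchiveMembershipWitness: (blockNumber: number, leaf: Buffer) => Promise<Buffer[]>,
): Promise<Buffer[]> {
  const blockHash = header.hash(); // block hash == hash of the block header
  return getArchiveMembershipWitness(lastArchiveBlockNumber, blockHash);
}
```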
65 changes: 27 additions & 38 deletions yarn-project/circuit-types/src/l2_block.ts
@@ -11,7 +11,7 @@
} from '@aztec/circuits.js';
import { makeAppendOnlyTreeSnapshot, makeHeader } from '@aztec/circuits.js/factories';
import { times } from '@aztec/foundation/collection';
- import { keccak, sha256 } from '@aztec/foundation/crypto';
+ import { sha256 } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { createDebugLogger } from '@aztec/foundation/log';
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
@@ -91,7 +91,6 @@ export class L2Block {
public newL1ToL2Messages: Fr[] = [],
newEncryptedLogs?: L2BlockL2Logs,
newUnencryptedLogs?: L2BlockL2Logs,
- private blockHash?: Buffer,
l1BlockNumber?: bigint,
) {
if (newCommitments.length % MAX_NEW_COMMITMENTS_PER_TX !== 0) {
@@ -173,7 +172,6 @@
newEncryptedLogs,
newUnencryptedLogs,
},
- undefined,
// just for testing purposes, each random L2 block got emitted in the equivalent L1 block
BigInt(l2BlockNum),
);
@@ -229,7 +227,6 @@
*/
newUnencryptedLogs?: L2BlockL2Logs;
},
- blockHash?: Buffer,
l1BlockNumber?: bigint,
) {
return new this(
@@ -244,7 +241,6 @@
fields.newL1ToL2Messages,
fields.newEncryptedLogs,
fields.newUnencryptedLogs,
- blockHash,
l1BlockNumber,
);
}
@@ -329,10 +325,9 @@
/**
* Deserializes L2 block without logs from a buffer.
* @param buf - A serialized L2 block.
- * @param blockHash - The hash of the block.
* @returns Deserialized L2 block.
*/
- static fromBuffer(buf: Buffer | BufferReader, blockHash?: Buffer) {
+ static fromBuffer(buf: Buffer | BufferReader) {
const reader = BufferReader.asReader(buf);
const header = reader.readObject(Header);
const archive = reader.readObject(AppendOnlyTreeSnapshot);
@@ -345,20 +340,17 @@
// TODO(sean): could an optimization of this be that it is encoded such that zeros are assumed
const newL1ToL2Messages = reader.readVector(Fr);

- return L2Block.fromFields(
-   {
-     archive,
-     header,
-     newCommitments,
-     newNullifiers,
-     newPublicDataWrites,
-     newL2ToL1Msgs,
-     newContracts,
-     newContractData,
-     newL1ToL2Messages,
-   },
-   blockHash,
- );
+ return L2Block.fromFields({
+   archive,
+   header,
+   newCommitments,
+   newNullifiers,
+   newPublicDataWrites,
+   newL2ToL1Msgs,
+   newContracts,
+   newContractData,
+   newL1ToL2Messages,
+ });
}

/**
@@ -441,14 +433,11 @@
}

/**
- * Returns the block's hash.
+ * Returns the block's hash (hash of block header).
* @returns The block's hash.
*/
- public getBlockHash(): Buffer {
-   if (!this.blockHash) {
-     this.blockHash = keccak(this.toBufferWithLogs());
-   }
-   return this.blockHash;
+ public hash(): Fr {
+   return this.header.hash();
}

/**
@@ -460,11 +449,11 @@
const buf = serializeToBuffer(
this.header.globalVariables,
// TODO(#3868)
- AppendOnlyTreeSnapshot.empty(), // this.startNoteHashTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startNullifierTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startContractTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startPublicDataTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startL1ToL2MessageTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startNoteHashTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startNullifierTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startContractTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startPublicDataTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startL1ToL2MessageTreeSnapshot,
this.header.lastArchive,
this.header.state.partial.noteHashTree,
this.header.state.partial.nullifierTree,
@@ -487,11 +476,11 @@
const inputValue = serializeToBuffer(
new Fr(Number(this.header.globalVariables.blockNumber.toBigInt()) - 1),
// TODO(#3868)
- AppendOnlyTreeSnapshot.empty(), // this.startNoteHashTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startNullifierTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startContractTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startPublicDataTreeSnapshot,
- AppendOnlyTreeSnapshot.empty(), // this.startL1ToL2MessageTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startNoteHashTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startNullifierTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startContractTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startPublicDataTreeSnapshot,
+ AppendOnlyTreeSnapshot.zero(), // this.startL1ToL2MessageTreeSnapshot,
this.header.lastArchive,
);
return sha256(inputValue);
@@ -639,7 +628,7 @@
newL2ToL1Msgs,
newContracts,
newContractData,
- this.getBlockHash(),
+ this.hash(),
Number(this.header.globalVariables.blockNumber.toBigInt()),
);
}
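The `AppendOnlyTreeSnapshot.empty()` → `zero()` rename used throughout the helpers above, sketched with a stand-in class (the real type lives in `@aztec/circuits.js`):

```typescript
// Stand-in snapshot type; "zero" means a zeroed root and next-available leaf index,
// matching the Noir naming mentioned in the commit description.
class AppendOnlyTreeSnapshotSketch {
  constructor(
    public readonly root: bigint,
    public readonly nextAvailableLeafIndex: number,
  ) {}

  // Formerly `empty()`; renamed so the TypeScript and Noir sides agree.
  static zero(): AppendOnlyTreeSnapshotSketch {
    return new AppendOnlyTreeSnapshotSketch(0n, 0);
  }
}
```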
8 changes: 4 additions & 4 deletions yarn-project/circuit-types/src/l2_tx.ts
@@ -57,7 +57,7 @@ export class L2Tx {
/**
* The unique identifier of the block containing the transaction.
*/
- public blockHash: Buffer,
+ public blockHash: Fr,
/**
* The block number in which the transaction was included.
*/
@@ -80,7 +80,7 @@
reader.readVector(Fr),
reader.readVector(Fr),
reader.readVector(ContractData),
- reader.readBytes(Fr.SIZE_IN_BYTES),
+ Fr.fromBuffer(reader),
reader.readNumber(),
);
}
@@ -106,7 +106,7 @@
new Vector(this.newL2ToL1Msgs).toBuffer(),
new Vector(this.newContracts).toBuffer(),
new Vector(this.newContractData).toBuffer(),
- this.blockHash,
+ this.blockHash.toBuffer(),
numToUInt32BE(this.blockNumber),
]);
}
@@ -127,7 +127,7 @@
times(rand(0, MAX_NEW_L2_TO_L1_MSGS_PER_TX), Fr.random),
times(rand(0, MAX_NEW_CONTRACTS_PER_TX), Fr.random),
times(rand(0, MAX_NEW_CONTRACTS_PER_TX), ContractData.random),
- Fr.random().toBuffer(),
+ Fr.random(),
123,
);
}
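The `L2Tx.blockHash` type change above (raw `Buffer` → field element) is why serialization switches to `Fr.fromBuffer` / `.toBuffer()`. A minimal stand-in `Fr` to show the round trip (not the real `@aztec/foundation/fields` implementation, which also validates against the field modulus):

```typescript
class FrSketch {
  static readonly SIZE_IN_BYTES = 32;
  constructor(public readonly value: bigint) {}

  // Fixed-width 32-byte big-endian encoding.
  toBuffer(): Buffer {
    return Buffer.from(this.value.toString(16).padStart(FrSketch.SIZE_IN_BYTES * 2, '0'), 'hex');
  }

  static fromBuffer(buf: Buffer): FrSketch {
    return new FrSketch(BigInt('0x' + buf.toString('hex')));
  }
}

const blockHash = new FrSketch(123n);
const roundTripped = FrSketch.fromBuffer(blockHash.toBuffer());
console.log(roundTripped.value === blockHash.value); // true
```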
