From b51e9085c8e386789bf9d128978ea6f0c1f94178 Mon Sep 17 00:00:00 2001 From: LHerskind Date: Thu, 22 Aug 2024 14:27:05 +0000 Subject: [PATCH 001/123] feat: cleanup publisher --- l1-contracts/src/core/Rollup.sol | 156 ++++++++++++++--- l1-contracts/src/core/interfaces/IRollup.sol | 13 ++ .../src/core/libraries/DataStructures.sol | 5 + l1-contracts/src/core/libraries/Errors.sol | 1 + .../src/core/sequencer_selection/Leonidas.sol | 36 ++-- l1-contracts/test/fixtures/empty_block_1.json | 12 +- l1-contracts/test/fixtures/empty_block_2.json | 14 +- l1-contracts/test/fixtures/mixed_block_1.json | 12 +- l1-contracts/test/fixtures/mixed_block_2.json | 14 +- l1-contracts/test/sparta/DevNet.t.sol | 2 +- .../src/publisher/l1-publisher.ts | 109 ++++++++---- .../src/sequencer/sequencer.ts | 159 ++++++------------ 12 files changed, 320 insertions(+), 213 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 773c2fc6759..02a3bdda097 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -17,6 +17,8 @@ import {Errors} from "./libraries/Errors.sol"; import {Constants} from "./libraries/ConstantsGen.sol"; import {MerkleLib} from "./libraries/MerkleLib.sol"; import {SignatureLib} from "./sequencer_selection/SignatureLib.sol"; +import {SafeCast} from "@oz/utils/math/SafeCast.sol"; +import {DataStructures} from "./libraries/DataStructures.sol"; // Contracts import {MockVerifier} from "../mock/MockVerifier.sol"; @@ -31,6 +33,8 @@ import {Leonidas} from "./sequencer_selection/Leonidas.sol"; * not giving a damn about gas costs. */ contract Rollup is Leonidas, IRollup, ITestRollup { + using SafeCast for uint256; + struct BlockLog { bytes32 archive; bytes32 blockHash; @@ -381,6 +385,90 @@ contract Rollup is Leonidas, IRollup, ITestRollup { return blocks[_blockNumber].archive; } + /** + * @notice Check if a proposer can propose at a given time + * + * @param _ts - The timestamp to check + * @param _proposer - The proposer to check + * @param _archive - The archive to check (should be the latest archive) + * + * @return uint256 - The slot at the given timestamp + * @return uint256 - The block number at the given timestamp + */ + function canProposeAtTime(uint256 _ts, address _proposer, bytes32 _archive) + external + view + override(IRollup) + returns (uint256, uint256) + { + uint256 slot = getSlotAt(_ts); + + uint256 lastSlot = uint256(blocks[pendingBlockCount - 1].slotNumber); + if (slot <= lastSlot) { + revert Errors.Rollup__SlotAlreadyInChain(lastSlot, slot); + } + + bytes32 tipArchive = archive(); + if (tipArchive != _archive) { + revert Errors.Rollup__InvalidArchive(tipArchive, _archive); + } + + if (isDevNet) { + _devnetSequencerSubmissionChecks(_proposer); + } else { + address proposer = getProposerAt(_ts); + if (proposer != address(0) && proposer != _proposer) { + revert Errors.Leonidas__InvalidProposer(proposer, _proposer); + } + } + + return (slot, pendingBlockCount); + } + + /** + * @notice Validate a header for submission + * + * @dev This is a convenience function that can be used by the sequencer to validate a "partial" header + * without having to deal with viem or anvil for simulating timestamps in the future. 
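// ---------------------------------------------------------------------------------
// A minimal sketch (not part of this patch) of how a caller could consume the new
// `canProposeAtTime` and `validateHeader` views added in this diff. The IRollup,
// SignatureLib and DataStructures names and signatures come from the patch itself;
// the `RollupPreflight` contract and its import paths are illustrative assumptions.
// ---------------------------------------------------------------------------------
pragma solidity >=0.8.18;

import {IRollup} from "../src/core/interfaces/IRollup.sol";
import {SignatureLib} from "../src/core/sequencer_selection/SignatureLib.sol";
import {DataStructures} from "../src/core/libraries/DataStructures.sol";

contract RollupPreflight {
  IRollup public immutable ROLLUP;

  constructor(IRollup _rollup) {
    ROLLUP = _rollup;
  }

  // Reverts (e.g. Rollup__SlotAlreadyInChain, Rollup__InvalidArchive,
  // Leonidas__InvalidProposer) if `_proposer` cannot build on `_archive` at `_ts`;
  // otherwise returns the slot at `_ts` and the pending block number.
  function preflightPropose(uint256 _ts, address _proposer, bytes32 _archive)
    external
    view
    returns (uint256 slot, uint256 blockNumber)
  {
    return ROLLUP.canProposeAtTime(_ts, _proposer, _archive);
  }

  // Dry-runs header validation at an arbitrary timestamp, skipping the DA and
  // signature checks via the new ExecutionFlags struct.
  function preflightHeader(bytes calldata _header, bytes32 _digest, uint256 _ts)
    external
    view
  {
    ROLLUP.validateHeader(
      _header,
      new SignatureLib.Signature[](0),
      _digest,
      _ts,
      DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true})
    );
  }
}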
+ * + * @param _header - The header to validate + * @param _signatures - The signatures to validate + * @param _digest - The digest to validate + * @param _currentTime - The current time + * @param _flags - The flags to validate + */ + function validateHeader( + bytes calldata _header, + SignatureLib.Signature[] memory _signatures, + bytes32 _digest, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) external view override(IRollup) { + // @note This is a convenience function to allow for easier testing of the header validation + // without having to go through the entire process of submitting a block. + // This is not used in production code. + + HeaderLib.Header memory header = HeaderLib.decode(_header); + _validateHeader(header, _signatures, _digest, _currentTime, _flags); + } + + function _validateHeader( + HeaderLib.Header memory _header, + SignatureLib.Signature[] memory _signatures, + bytes32 _digest, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) internal view { + // @note This is a convenience function to allow for easier testing of the header validation + // without having to go through the entire process of submitting a block. + // This is not used in production code. + + _validateHeaderForSubmissionBase(_header, _currentTime, _flags); + _validateHeaderForSubmissionSequencerSelection( + _header.globalVariables.slotNumber, _signatures, _digest, _currentTime, _flags + ); + } + /** * @notice Processes an incoming L2 block with signatures * @@ -397,15 +485,19 @@ contract Rollup is Leonidas, IRollup, ITestRollup { ) public override(IRollup) { // Decode and validate header HeaderLib.Header memory header = HeaderLib.decode(_header); - _validateHeaderForSubmissionBase(header); - _validateHeaderForSubmissionSequencerSelection(header, _signatures, _archive); + setupEpoch(); + _validateHeader({ + _header: header, + _signatures: _signatures, + _digest: _archive, + _currentTime: block.timestamp, + _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) + }); - // As long as the header is passing validity check in `_validateHeaderForSubmissionBase` we can safely cast - // the slot number to uint128 blocks[pendingBlockCount++] = BlockLog({ archive: _archive, blockHash: _blockHash, - slotNumber: uint128(header.globalVariables.slotNumber), + slotNumber: header.globalVariables.slotNumber.toUint128(), isProven: false }); @@ -511,15 +603,17 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @dev While in isDevNet, we allow skipping all of the checks as we simply assume only TRUSTED sequencers * - * @param _header - The header to validate + * @param _slot - The slot of the header to validate * @param _signatures - The signatures to validate - * @param _archive - The archive root of the block + * @param _digest - The digest that signatures sign over */ function _validateHeaderForSubmissionSequencerSelection( - HeaderLib.Header memory _header, + uint256 _slot, SignatureLib.Signature[] memory _signatures, - bytes32 _archive - ) internal { + bytes32 _digest, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) internal view { if (isDevNet) { // @note If we are running in a devnet, we don't want to perform all the consensus // checks, we instead simply require that either there are NO validators or @@ -528,22 +622,15 @@ contract Rollup is Leonidas, IRollup, ITestRollup { // This means that we relaxes the condition that the block must land in the // correct slot and epoch to make it more fluid for the 
devnet launch // or for testing. - if (getValidatorCount() == 0) { - return; - } - if (!isValidator(msg.sender)) { - revert Errors.Leonidas__InvalidProposer(getValidatorAt(0), msg.sender); - } + _devnetSequencerSubmissionChecks(msg.sender); return; } - uint256 slot = _header.globalVariables.slotNumber; - // Ensure that the slot proposed is NOT in the future - uint256 currentSlot = getCurrentSlot(); - if (slot != currentSlot) { - revert Errors.HeaderLib__InvalidSlotNumber(currentSlot, slot); + uint256 currentSlot = getSlotAt(_currentTime); + if (_slot != currentSlot) { + revert Errors.HeaderLib__InvalidSlotNumber(currentSlot, _slot); } // @note We are currently enforcing that the slot is in the current epoch @@ -551,13 +638,13 @@ contract Rollup is Leonidas, IRollup, ITestRollup { // of an entire epoch if no-one from the new epoch committee have seen // those blocks or behaves as if they did not. - uint256 epochNumber = getEpochAt(getTimestampForSlot(slot)); - uint256 currentEpoch = getCurrentEpoch(); + uint256 epochNumber = getEpochAt(getTimestampForSlot(_slot)); + uint256 currentEpoch = getEpochAt(_currentTime); if (epochNumber != currentEpoch) { revert Errors.Rollup__InvalidEpoch(currentEpoch, epochNumber); } - _processPendingBlock(epochNumber, slot, _signatures, _archive); + _processPendingBlock(_slot, _signatures, _digest, _flags); } /** @@ -577,7 +664,11 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @param _header - The header to validate */ - function _validateHeaderForSubmissionBase(HeaderLib.Header memory _header) internal view { + function _validateHeaderForSubmissionBase( + HeaderLib.Header memory _header, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) internal view { if (block.chainid != _header.globalVariables.chainId) { revert Errors.Rollup__InvalidChainId(block.chainid, _header.globalVariables.chainId); } @@ -613,8 +704,21 @@ contract Rollup is Leonidas, IRollup, ITestRollup { } // Check if the data is available using availability oracle (change availability oracle if you want a different DA layer) - if (!AVAILABILITY_ORACLE.isAvailable(_header.contentCommitment.txsEffectsHash)) { + if ( + !_flags.ignoreDA && !AVAILABILITY_ORACLE.isAvailable(_header.contentCommitment.txsEffectsHash) + ) { revert Errors.Rollup__UnavailableTxs(_header.contentCommitment.txsEffectsHash); } } + + function _devnetSequencerSubmissionChecks(address _proposer) internal view { + if (getValidatorCount() == 0) { + return; + } + + if (!isValidator(_proposer)) { + revert Errors.DevNet__InvalidProposer(getValidatorAt(0), _proposer); + } + return; + } } diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index a2f76ee511d..4e129e052b5 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -6,6 +6,7 @@ import {IInbox} from "../interfaces/messagebridge/IInbox.sol"; import {IOutbox} from "../interfaces/messagebridge/IOutbox.sol"; import {SignatureLib} from "../sequencer_selection/SignatureLib.sol"; +import {DataStructures} from "../libraries/DataStructures.sol"; interface ITestRollup { function setDevNet(bool _devNet) external; @@ -20,6 +21,18 @@ interface IRollup { event ProgressedState(uint256 provenBlockCount, uint256 pendingBlockCount); event PrunedPending(uint256 provenBlockCount, uint256 pendingBlockCount); + function canProposeAtTime(uint256 _ts, address _proposer, bytes32 _archive) + external + view + returns (uint256, uint256); + function validateHeader( 
+ bytes calldata _header, + SignatureLib.Signature[] memory _signatures, + bytes32 _digest, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) external view; + function prune() external; function INBOX() external view returns (IInbox); diff --git a/l1-contracts/src/core/libraries/DataStructures.sol b/l1-contracts/src/core/libraries/DataStructures.sol index 0bdc315b6de..5462e09fa1b 100644 --- a/l1-contracts/src/core/libraries/DataStructures.sol +++ b/l1-contracts/src/core/libraries/DataStructures.sol @@ -74,4 +74,9 @@ library DataStructures { uint256 blockNumber; } // docs:end:registry_snapshot + + struct ExecutionFlags { + bool ignoreDA; + bool ignoreSignatures; + } } diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index f66a393e7bb..257d7695707 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -12,6 +12,7 @@ pragma solidity >=0.8.18; library Errors { // DEVNET related error DevNet__NoPruningAllowed(); // 0x6984c590 + error DevNet__InvalidProposer(address expected, address actual); // 0x11e6e6f7 // Inbox error Inbox__Unauthorized(); // 0xe5336a6b diff --git a/l1-contracts/src/core/sequencer_selection/Leonidas.sol b/l1-contracts/src/core/sequencer_selection/Leonidas.sol index 33826e6f563..709e6fad153 100644 --- a/l1-contracts/src/core/sequencer_selection/Leonidas.sol +++ b/l1-contracts/src/core/sequencer_selection/Leonidas.sol @@ -2,6 +2,7 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.18; +import {DataStructures} from "../libraries/DataStructures.sol"; import {Errors} from "../libraries/Errors.sol"; import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; import {Ownable} from "@oz/access/Ownable.sol"; @@ -349,29 +350,18 @@ contract Leonidas is Ownable, ILeonidas { * - If the proposer is not the real proposer AND the proposer is not open * - If the number of valid attestations is insufficient * - * @param _epochNumber - The epoch number of the block * @param _slot - The slot of the block * @param _signatures - The signatures of the committee members * @param _digest - The digest of the block */ function _processPendingBlock( - uint256 _epochNumber, uint256 _slot, SignatureLib.Signature[] memory _signatures, - bytes32 _digest - ) internal { - // @note Setup the CURRENT epoch if not already done. - // not necessarily the one we are processing! - setupEpoch(); - - Epoch storage epoch = epochs[_epochNumber]; - - // We should never enter this case because of `setupEpoch` - if (epoch.sampleSeed == 0) { - revert Errors.Leonidas__EpochNotSetup(); - } - - address proposer = getProposerAt(getTimestampForSlot(_slot)); + bytes32 _digest, + DataStructures.ExecutionFlags memory _flags + ) internal view { + uint256 ts = getTimestampForSlot(_slot); + address proposer = getProposerAt(ts); // If the proposer is open, we allow anyone to propose without needing any signatures if (proposer == address(0)) { @@ -383,7 +373,17 @@ contract Leonidas is Ownable, ILeonidas { revert Errors.Leonidas__InvalidProposer(proposer, msg.sender); } - uint256 needed = epoch.committee.length * 2 / 3 + 1; + // @note This is NOT the efficient way to do it, but it is a very convenient way for us to do it + // that allows us to reduce the number of code paths. Also when changed with optimistic for + // pleistarchus, this will be changed, so we can live with it. 
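// ---------------------------------------------------------------------------------
// A minimal sketch (not part of this patch) of the attestation threshold that
// _processPendingBlock enforces below when a proposer is set and
// `_flags.ignoreSignatures` is false: strictly more than two thirds of the
// committee must have signed. The library name is an illustrative assumption;
// the formula mirrors `committee.length * 2 / 3 + 1` from this diff.
// ---------------------------------------------------------------------------------
pragma solidity >=0.8.18;

library AttestationThreshold {
  // e.g. a committee of 48 needs 33 attestations, a committee of 3 needs all 3.
  function required(uint256 _committeeSize) internal pure returns (uint256) {
    return (_committeeSize * 2) / 3 + 1;
  }
}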
+ + if (_flags.ignoreSignatures) { + return; + } + + address[] memory committee = getCommitteeAt(ts); + + uint256 needed = committee.length * 2 / 3 + 1; if (_signatures.length < needed) { revert Errors.Leonidas__InsufficientAttestationsProvided(needed, _signatures.length); } @@ -400,7 +400,7 @@ contract Leonidas is Ownable, ILeonidas { } // The verification will throw if invalid - signature.verify(epoch.committee[i], ethSignedDigest); + signature.verify(committee[i], ethSignedDigest); validAttestations++; } diff --git a/l1-contracts/test/fixtures/empty_block_1.json b/l1-contracts/test/fixtures/empty_block_1.json index 20b938cbc40..4f277922e9d 100644 --- a/l1-contracts/test/fixtures/empty_block_1.json +++ b/l1-contracts/test/fixtures/empty_block_1.json @@ -8,7 +8,7 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x1fc4515430abede0c269e0aaf99fe032b4368b094b2a399d112144d8e0b4b803", + "archive": "0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c5", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ -22,10 +22,10 @@ "blockNumber": 1, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000033", "chainId": 31337, - "timestamp": 1724321038, + "timestamp": 1724858655, "version": 1, - "coinbase": "0x69d2d2c697a0ac4a874c591f6906706af27eb737", - "feeRecipient": "0x2c6280804920e2ecb139fe6185aeba95ee3687e64a14ff68a72a25ab9bb0d5eb", + "coinbase": "0xf6ed6ac83b91e8a719e552711e90249ae6fb95e0", + "feeRecipient": "0x28e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -56,8 +56,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000066c70d0e69d2d2c697a0ac4a874c591f6906706af27eb7372c6280804920e2ecb139fe6185aeba95ee3687e64a14ff68a72a25ab9bb0d5eb000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00acfc19a39d0814b57d47fbf843284cd2d293382e82dfcaafc819daf89b81b5", + "header": 
"0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000066cf411ff6ed6ac83b91e8a719e552711e90249ae6fb95e028e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x00bc2933646ffc5ac63d957ffb345230d5fc0bfc14359cd13f3a77b60c5d1e1a", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/empty_block_2.json b/l1-contracts/test/fixtures/empty_block_2.json index 0142912e043..ecbf0057f01 100644 --- a/l1-contracts/test/fixtures/empty_block_2.json +++ b/l1-contracts/test/fixtures/empty_block_2.json @@ -8,7 +8,7 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x2da3733e9f6522fcc8016aff753cb69100051aae6f3612a2180959adfd3293f6", + "archive": "0x0d67087b909c777b9b4c429d421047608d3b5113d6525f024403ba72f0e8c039", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ -22,10 +22,10 @@ "blockNumber": 2, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000035", "chainId": 31337, - "timestamp": 1724321062, + "timestamp": 1724858679, "version": 1, - "coinbase": "0x69d2d2c697a0ac4a874c591f6906706af27eb737", - "feeRecipient": "0x2c6280804920e2ecb139fe6185aeba95ee3687e64a14ff68a72a25ab9bb0d5eb", + "coinbase": "0xf6ed6ac83b91e8a719e552711e90249ae6fb95e0", + "feeRecipient": "0x28e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -33,7 +33,7 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x1fc4515430abede0c269e0aaf99fe032b4368b094b2a399d112144d8e0b4b803" + "root": "0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c5" }, "stateReference": { "l1ToL2MessageTree": { @@ -56,8 +56,8 @@ } } }, - "header": 
"0x1fc4515430abede0c269e0aaf99fe032b4368b094b2a399d112144d8e0b4b80300000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000066c70d2669d2d2c697a0ac4a874c591f6906706af27eb7372c6280804920e2ecb139fe6185aeba95ee3687e64a14ff68a72a25ab9bb0d5eb000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00318295aad2a231c68b825b4fca99e2cbb1345c82ee6d01888289771199d1b9", + "header": "0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c500000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000066cf4137f6ed6ac83b91e8a719e552711e90249ae6fb95e028e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x00d1b60b5c9c83f637568ce4355e0127dc27d65edd9f632ab7fb3b00c9fd2199", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_1.json b/l1-contracts/test/fixtures/mixed_block_1.json index 7d6b8e45d9f..a74f4b6d39a 100644 --- a/l1-contracts/test/fixtures/mixed_block_1.json +++ b/l1-contracts/test/fixtures/mixed_block_1.json @@ -58,7 +58,7 @@ ] }, "block": { - "archive": "0x2e509ada109d80ef634c0eca74fecd28b17727847e3b21cf01961c73b1b58978", + "archive": "0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a", "body": 
"0x00000004000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000001420000000000000000000000000000000000000000000000000000000000000143000000000000000000000000000000000000000000000000000000000000014400000000000000000000000000000000000000000000000000000000000001450000000000000000000000000000000000000000000000000000000000000146000000000000000000000000000000000000000000000000000000000000014700000000000000000000000000000000000000000000000000000000000001480000000000000000000000000000000000000000000000000000000000000149000000000000000000000000000000000000000000000000000000000000014a000000000000000000000000000000000000000000000000000000000000014b000000000000000000000000000000000000000000000000000000000000014c000000000000000000000000000000000000000000000000000000000000014d000000000000000000000000000000000000000000000000000000000000014e000000000000000000000000000000000000000000000000000000000000014f0000000000000000000000000000000000000000000000000000000000000150000000000000000000000000000000000000000000000000000000000000015100000000000000000000000000000000000000000000000000000000000001520000000000000000000000000000000000000000000000000000000000000153000000000000000000000000000000000000000000000000000000000000015400000000000000000000000000000000000000000000000000000000000001550000000000000000000000000000000000000000000000000000000000000156000000000000000000000000000000000000000000000000000000000000015700000000000000000000000000000000000000000000000000000000000001580000000000000000000000000000000000000000000000000000000000000159000000000000000000000000000000000000000000000000000000000000015a000000000000000000000000000000000000000000000000000000000000015b000000000000000000000000000000000000000000000000000000000000015c000000000000000000000000000000000000000000000000000000000000015d000000000000000000000000000000000000000000000000000000000000015e000000000000000000000000000000000000000000000000000000000000015f0000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000016100000000000000000000000000000000000000000000000000000000000001620000000000000000000000000000000000000000000000000000000000000163000000000000000000000000000000000000000000000000000000000000016400000000000000000000000000000000000000000000000000000000000001650000000000000000000000000000000000000000000000000000000000000166000000000000000000000000000000000000000000000000000000000000016700000000000000000000000000000000000000000000000000000000000001680000000000000000000000000000000000000000000000000000000000000169000000000000000000000000000000000000000000000000000000000000016a000000000000000000000000000000000000000000000000000000000000016b000000000000000000000000000000000000000000000000000000000000016c000000000000000000000000000000000000000000000000000000000000016d000000000000000000000000000000000000000000000000000000000000016e000000000000000000000000000000000000000000000000000000000000016f00000000000000000000000000000000000000000000000000000000000001700000000000000000000000000000000000000000000000000000000000000171000000000000000000000000000000000000000000000000000000000000017200000000000000000000000000000000000000000000000000000000000001730000000000000000000000000000000000000000000000000000000000000174000000000000000000000000000000000000000000000000000000000000017500000000000000000000
00000000000000000000000000000000000000000176000000000000000000000000000000000000000000000000000000000000017700000000000000000000000000000000000000000000000000000000000001780000000000000000000000000000000000000000000000000000000000000179000000000000000000000000000000000000000000000000000000000000017a000000000000000000000000000000000000000000000000000000000000017b000000000000000000000000000000000000000000000000000000000000017c000000000000000000000000000000000000000000000000000000000000017d000000000000000000000000000000000000000000000000000000000000017e000000000000000000000000000000000000000000000000000000000000017f3f0000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c00000000000000000000000000000000000000000000000000000
0000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f0000000000000000000000000000000000000000000000000000000000000270000000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000002720000000000000000000000000000000000000000000000000000000000000273000000000000000000000000000000000000000000000000000000000000027400000000000000000000000000000000000000000000000000000000000002750000000000000000000000000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e0800c47667396742a5474f325e2567bff3bb99b7f0bfd2b1a689b635d8b8726cce00284120278895e8d47084ae759f390e9634881e41369a257a36fe99a2369dc800a328b4a4a6ed156325253b4ab2af7ca00e21caf7963f0fba8a88ccdc3512c300705c99df420bface231f6855799db1d0ed7d39419abc47aa8c6efe2ae7aae2009299cc308de6d23788384411e024791a5b2448e455fbdd1d1f28f3ff76631f002d6c03d81ad764de51b0f34584645191cdc2aaae2ca08fb838d142b95d62f5003032f3618b2df0fa335d5fd548d6d85e42b4e7eb5fff9eb687facbbdecb8a60016cab7ddf4d1b440d53d10284c5c82a78b2d4e27dcdb44ef434ef4c6bad6783f0000000000000000000000000000000000000000000000000000000000000540000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000541000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000542000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000543000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000544000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000545000000000000000000000000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005460000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000054700000000000000000000000000000000000000000000000000000000000005510000000000000000000000000000000000000000000000000000000000000548000000000000000000000000000000000000000000000000000000000000055200000000000000000000000000000000000000000000000000000000000005490000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000556000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000557000000000000000000000000000000000000000000000000000000000000054e000000000000000000000000000000000000000000000000000000000000055800000000000000000000
0000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000551000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000552000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005560000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000055700000000000000000000000000000000000000000000000000000000000005610000000000000000000000000000000000000000000000000000000000000558000000000000000000000000000000000000000000000000000000000000056200000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000566000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000567000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000561000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000562000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005660000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000056700000000000000000000000000000000000000000000000000000000000005710000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000057200000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000
000000574000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000576000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000577000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000571000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000572000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000057f00000000000000000000000000000000000000000000000000000000000005760000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000057700000000000000000000000000000000000000000000000000000000000005810000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000058200000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000586000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000587000000000000000000000000000000000000000000000000000000000000057e000000000000000000000000000000000000000000000000000000000000058800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001810000000000000000000000000000000000000000000000000000000000000182000000000000000000000000000000000000000000000000000000000000018300000000000000000000000000000000000000000000000000000000000001840000000000000000000000000000000000000000000000000000000000000185000000000000000000000000000000000000000000000000000000000000018600000000000000000000000000000000000000000000000000000000000001870000000000000000000000000000000000000000000000000000000000000188000000000000000000000000000000000000000000000000000000000000018900000000000000000000000000000000000000000000000000000000000001
8a000000000000000000000000000000000000000000000000000000000000018b000000000000000000000000000000000000000000000000000000000000018c000000000000000000000000000000000000000000000000000000000000018d000000000000000000000000000000000000000000000000000000000000018e000000000000000000000000000000000000000000000000000000000000018f0000000000000000000000000000000000000000000000000000000000000190000000000000000000000000000000000000000000000000000000000000019100000000000000000000000000000000000000000000000000000000000001920000000000000000000000000000000000000000000000000000000000000193000000000000000000000000000000000000000000000000000000000000019400000000000000000000000000000000000000000000000000000000000001950000000000000000000000000000000000000000000000000000000000000196000000000000000000000000000000000000000000000000000000000000019700000000000000000000000000000000000000000000000000000000000001980000000000000000000000000000000000000000000000000000000000000199000000000000000000000000000000000000000000000000000000000000019a000000000000000000000000000000000000000000000000000000000000019b000000000000000000000000000000000000000000000000000000000000019c000000000000000000000000000000000000000000000000000000000000019d000000000000000000000000000000000000000000000000000000000000019e000000000000000000000000000000000000000000000000000000000000019f00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000000000000000000000000000000001a100000000000000000000000000000000000000000000000000000000000001a200000000000000000000000000000000000000000000000000000000000001a300000000000000000000000000000000000000000000000000000000000001a400000000000000000000000000000000000000000000000000000000000001a500000000000000000000000000000000000000000000000000000000000001a600000000000000000000000000000000000000000000000000000000000001a700000000000000000000000000000000000000000000000000000000000001a800000000000000000000000000000000000000000000000000000000000001a900000000000000000000000000000000000000000000000000000000000001aa00000000000000000000000000000000000000000000000000000000000001ab00000000000000000000000000000000000000000000000000000000000001ac00000000000000000000000000000000000000000000000000000000000001ad00000000000000000000000000000000000000000000000000000000000001ae00000000000000000000000000000000000000000000000000000000000001af00000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b100000000000000000000000000000000000000000000000000000000000001b200000000000000000000000000000000000000000000000000000000000001b300000000000000000000000000000000000000000000000000000000000001b400000000000000000000000000000000000000000000000000000000000001b500000000000000000000000000000000000000000000000000000000000001b600000000000000000000000000000000000000000000000000000000000001b700000000000000000000000000000000000000000000000000000000000001b800000000000000000000000000000000000000000000000000000000000001b900000000000000000000000000000000000000000000000000000000000001ba00000000000000000000000000000000000000000000000000000000000001bb00000000000000000000000000000000000000000000000000000000000001bc00000000000000000000000000000000000000000000000000000000000001bd00000000000000000000000000000000000000000000000000000000000001be00000000000000000000000000000000000000000000000000000000000001bf3f000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002810000000000000000000000000000000
0000000000000000000000000000002820000000000000000000000000000000000000000000000000000000000000283000000000000000000000000000000000000000000000000000000000000028400000000000000000000000000000000000000000000000000000000000002850000000000000000000000000000000000000000000000000000000000000286000000000000000000000000000000000000000000000000000000000000028700000000000000000000000000000000000000000000000000000000000002880000000000000000000000000000000000000000000000000000000000000289000000000000000000000000000000000000000000000000000000000000028a000000000000000000000000000000000000000000000000000000000000028b000000000000000000000000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900
000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be0800789ff73d7787206612d96dfc2143f2344de21669a3f8cae7fe9a8918631eb00084a17f00bf8793b6851a106e9155543125e0be987ad3c8334456bdda171d0b00a400f8fd336ad84f467465964008238fd1b7f9c51c22912d706cd2b874d24e002c79bdd83c14ff50a46964f838ee207564909e28af79a57fc195810d36f9b20070083c6ef1e4dd88a064e94d2582283b203cf8a2ab1667f4370eda1b4c1fe8005373dffb5b590053d7762efcf9e11280f1486ce82e7996d94ee0f5d7c093bc009eefd90eb40e79c78bac1f71ec78bdc2f8b30041974239bdc765edffed813800ea95742e72792ca7a0f66ce9f55bc47dc09d5ea08c1b9018763102776978303f0000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000581000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000582000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005860000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000058700000000000000000000000000000000000000000000000000000000000005910000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000059200000000000000000000000000000000000000000000000000000000000005890000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000596000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000597000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000598000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005990000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000059a0000000000000000000000000000000000000000000000000000000000000591000000000000000000000000000000000000000000000000000000000000059b0000000000000000000000000000000000000000000000000000000000000592000000000000000000000000000000000000000000000000000000000000059c0000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000059d0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000059e000000000000000000000000000000000
0000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000059f000000000000000000000000000000000000000000000000000000000000059600000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000059700000000000000000000000000000000000000000000000000000000000005a1000000000000000000000000000000000000000000000000000000000000059800000000000000000000000000000000000000000000000000000000000005a2000000000000000000000000000000000000000000000000000000000000059900000000000000000000000000000000000000000000000000000000000005a3000000000000000000000000000000000000000000000000000000000000059a00000000000000000000000000000000000000000000000000000000000005a4000000000000000000000000000000000000000000000000000000000000059b00000000000000000000000000000000000000000000000000000000000005a5000000000000000000000000000000000000000000000000000000000000059c00000000000000000000000000000000000000000000000000000000000005a6000000000000000000000000000000000000000000000000000000000000059d00000000000000000000000000000000000000000000000000000000000005a7000000000000000000000000000000000000000000000000000000000000059e00000000000000000000000000000000000000000000000000000000000005a8000000000000000000000000000000000000000000000000000000000000059f00000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005a100000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005a200000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005a300000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005a400000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005a500000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005a600000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005a700000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005a800000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005ba0000
0000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005bf00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005ba00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005c80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000001c100000000000000000000000000000000000000000000000000000000000001c200000000000000000000000000000000000000000000000000000000000001c300000000000000000000000000000000000000000000000000000000000001c400000000000000000000000000000000000000000000000000000000000001c500000000000000000000000000000000000000000000000000000000000001c600000000000000000000000000000000000000000000000000000000000001c700000000000000000000000000000000000000000000000000000000000001c800000000000000000000000000000000000000000000000000000000000001c900000000000000000000000000000000000000000000000000000000000001ca00000000000000000000000000000000000000000000000000000000000001cb00000000000000000000000000000000000000000000000000000000000001cc00000000000000000000000000000000000000000000000000000000000001cd00000000000000000000000000000000000000000000000000000000000001ce00000000000000000000000000000000000000000000000000000000000001cf00000000000000000000000000000000000000000000000000000000000001d000000000000000000000000000000000000000000000000000000000000001d100000000000000000000000000000000000000000000000000000000000001d200000000000000000000000000000000000000000000000000000000000001d300000000000000000000000000000000000000000000000000000000000001d400000000000000000000000000000000000000000000000000000000000001d500000000000000000000000000000000000000000000000000000000000001d600000000000
000000000000000000000000000000000000000000000000001d700000000000000000000000000000000000000000000000000000000000001d800000000000000000000000000000000000000000000000000000000000001d900000000000000000000000000000000000000000000000000000000000001da00000000000000000000000000000000000000000000000000000000000001db00000000000000000000000000000000000000000000000000000000000001dc00000000000000000000000000000000000000000000000000000000000001dd00000000000000000000000000000000000000000000000000000000000001de00000000000000000000000000000000000000000000000000000000000001df00000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000001e100000000000000000000000000000000000000000000000000000000000001e200000000000000000000000000000000000000000000000000000000000001e300000000000000000000000000000000000000000000000000000000000001e400000000000000000000000000000000000000000000000000000000000001e500000000000000000000000000000000000000000000000000000000000001e600000000000000000000000000000000000000000000000000000000000001e700000000000000000000000000000000000000000000000000000000000001e800000000000000000000000000000000000000000000000000000000000001e900000000000000000000000000000000000000000000000000000000000001ea00000000000000000000000000000000000000000000000000000000000001eb00000000000000000000000000000000000000000000000000000000000001ec00000000000000000000000000000000000000000000000000000000000001ed00000000000000000000000000000000000000000000000000000000000001ee00000000000000000000000000000000000000000000000000000000000001ef00000000000000000000000000000000000000000000000000000000000001f000000000000000000000000000000000000000000000000000000000000001f100000000000000000000000000000000000000000000000000000000000001f200000000000000000000000000000000000000000000000000000000000001f300000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f500000000000000000000000000000000000000000000000000000000000001f600000000000000000000000000000000000000000000000000000000000001f700000000000000000000000000000000000000000000000000000000000001f800000000000000000000000000000000000000000000000000000000000001f900000000000000000000000000000000000000000000000000000000000001fa00000000000000000000000000000000000000000000000000000000000001fb00000000000000000000000000000000000000000000000000000000000001fc00000000000000000000000000000000000000000000000000000000000001fd00000000000000000000000000000000000000000000000000000000000001fe00000000000000000000000000000000000000000000000000000000000001ff3f00000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000
000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa00000000000000000000000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe0800a68d2df6e48c8b31f4c76529e586de2cd9d0f2f2fdfbc4c523db9e3a29e9b80016c236e57bf17afe324437acd1060772e3f31d4f9e734ad758d0627c4ba2a200d659011ddde95e32886bdd8c9464da1ca144ccadd539f0992b4abb491d812a00c8025bb9006a976ebd6ad940155f15f89ca0bb7312b53841fc257e7ed689c8004165f2c46b70fb183468b38f31bba7aa9d22ce8dfb61fe721470729ce1c6b100afeb60dd983514ebbaee141b2196f3eb3c9c299f576a0097872cc85a79a43f005f6bfee53d20a474901493558419dbceb3aca40e5e18915d38031498f4d2bb008791217ee34
1ec5dc11f7d7a31160fb1b1f2cf767062970e9526e5751956253f00000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005c800000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005c900000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005e40000000000000000000000000000000000000000000000
0000000000000005db00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005f600000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005f700000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005f800000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005f900000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005fa00000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005fb00000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005fc00000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005fd00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005fe00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ff00000000000000000000000000000000000000000000000000000000000005f6000000000000000000000000000000000000000000000000000000000000060000000000000000000
000000000000000000000000000000000000000000005f7000000000000000000000000000000000000000000000000000000000000060100000000000000000000000000000000000000000000000000000000000005f8000000000000000000000000000000000000000000000000000000000000060200000000000000000000000000000000000000000000000000000000000005f9000000000000000000000000000000000000000000000000000000000000060300000000000000000000000000000000000000000000000000000000000005fa000000000000000000000000000000000000000000000000000000000000060400000000000000000000000000000000000000000000000000000000000005fb000000000000000000000000000000000000000000000000000000000000060500000000000000000000000000000000000000000000000000000000000005fc000000000000000000000000000000000000000000000000000000000000060600000000000000000000000000000000000000000000000000000000000005fd000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000005fe0000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020100000000000000000000000000000000000000000000000000000000000002020000000000000000000000000000000000000000000000000000000000000203000000000000000000000000000000000000000000000000000000000000020400000000000000000000000000000000000000000000000000000000000002050000000000000000000000000000000000000000000000000000000000000206000000000000000000000000000000000000000000000000000000000000020700000000000000000000000000000000000000000000000000000000000002080000000000000000000000000000000000000000000000000000000000000209000000000000000000000000000000000000000000000000000000000000020a000000000000000000000000000000000000000000000000000000000000020b000000000000000000000000000000000000000000000000000000000000020c000000000000000000000000000000000000000000000000000000000000020d000000000000000000000000000000000000000000000000000000000000020e000000000000000000000000000000000000000000000000000000000000020f0000000000000000000000000000000000000000000000000000000000000210000000000000000000000000000000000000000000000000000000000000021100000000000000000000000000000000000000000000000000000000000002120000000000000000000000000000000000000000000000000000000000000213000000000000000000000000000000000000000000000000000000000000021400000000000000000000000000000000000000000000000000000000000002150000000000000000000000000000000000000000000000000000000000000216000000000000000000000000000000000000000000000000000000000000021700000000000000000000000000000000000000000000000000000000000002180000000000000000000000000000000000000000000000000000000000000219000000000000000000000000000000000000000000000000000000000000021a000000000000000000000000000000000000000000000000000000000000021b000000000000000000000000000000000000000000000000000000000000021c000000000000000000000000000000000000000000000000000000000000021d000000000000000000000000000000000000000000000000000000000000021e000000000000000000000000000000000000000000000000000000000000021f000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000002210000000000000000000000000000000000000000000000000000000000000222000000000000000000000000
0000000000000000000000000000000000000223000000000000000000000000000000000000000000000000000000000000022400000000000000000000000000000000000000000000000000000000000002250000000000000000000000000000000000000000000000000000000000000226000000000000000000000000000000000000000000000000000000000000022700000000000000000000000000000000000000000000000000000000000002280000000000000000000000000000000000000000000000000000000000000229000000000000000000000000000000000000000000000000000000000000022a000000000000000000000000000000000000000000000000000000000000022b000000000000000000000000000000000000000000000000000000000000022c000000000000000000000000000000000000000000000000000000000000022d000000000000000000000000000000000000000000000000000000000000022e000000000000000000000000000000000000000000000000000000000000022f0000000000000000000000000000000000000000000000000000000000000230000000000000000000000000000000000000000000000000000000000000023100000000000000000000000000000000000000000000000000000000000002320000000000000000000000000000000000000000000000000000000000000233000000000000000000000000000000000000000000000000000000000000023400000000000000000000000000000000000000000000000000000000000002350000000000000000000000000000000000000000000000000000000000000236000000000000000000000000000000000000000000000000000000000000023700000000000000000000000000000000000000000000000000000000000002380000000000000000000000000000000000000000000000000000000000000239000000000000000000000000000000000000000000000000000000000000023a000000000000000000000000000000000000000000000000000000000000023b000000000000000000000000000000000000000000000000000000000000023c000000000000000000000000000000000000000000000000000000000000023d000000000000000000000000000000000000000000000000000000000000023e000000000000000000000000000000000000000000000000000000000000023f3f0000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000
000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f0000000000000000000000000000000000000000000000000000000000000320000000000000000000000000000000000000000000000000000000000000032100000000000000000000000000000000000000000000000000000000000003220000000000000000000000000000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e0800c064a9343bfaf1345a5ad188e96aa4c5bad0b4a5590da51a9ff8f3c98ade8d008803d57dd0923c95a01b2bfbee4df6d66dc7baee1d2cd2770575feb86d040200ea64eed9489feb7bdf29bf817a6e8772e549da7b291028852d0dd3810cee9500947e8f904d41be8a4e08b146b4e1f0cd88f55722ef987d1d485c4196ab9f71002e5d9ed5d71bc3bea6edf988c4b9ba7fceb0815180d893852ed343c64ab55c0040f7f519ec89d360d83e242b6c0ce049d2b3356b8cfbf1ff3b733ef0f8a089006f5cfe5fc4a7b87c634f9e253a7cea2052229250d0c0e913eb50b5ef3612de00b759fce0eed36bb653b67255cce111b3529c383bd7d2b758f8cb53a4451f273f0000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000601000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000602000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000603000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000604000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000605000000000000000000000000
000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006060000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000006110000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000061200000000000000000000000000000000000000000000000000000000000006090000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000616000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000617000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000611000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000612000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006160000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000061700000000000000000000000000000000000000000000000000000000000006210000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000062200000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000626000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000627000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000062a00000000000000000000000000000000000000000000000000000000000
00621000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000622000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006260000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000062700000000000000000000000000000000000000000000000000000000000006310000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000063200000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000636000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000637000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000631000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000632000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000063f00000000000000000000000000000000000000000000000000000000000006360000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000063700000000000000000000000000000000000000000000000000000000000006410000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000064200000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000646000000000000000000000000000000
000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000647000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "txsEffectsHash": "0x00e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b", "decodedHeader": { @@ -72,10 +72,10 @@ "blockNumber": 1, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000043", "chainId": 31337, - "timestamp": 1724320114, + "timestamp": 1724857731, "version": 1, - "coinbase": "0xa7cd83a4518a418b4dfbec14238d07ea9e6a0e58", - "feeRecipient": "0x25ce59a5810d314c43717d1b8e0bb9fb8eb574fb4250817e4125c65496cf12f1", + "coinbase": "0x210c97d1d20ae59929bf8ac222b5508cd9c37d2a", + "feeRecipient": "0x2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -106,8 +106,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000066c70972a7cd83a4518a418b4dfbec14238d07ea9e6a0e5825ce59a5810d314c43717d1b8e0bb9fb8eb574fb4250817e4125c65496cf12f1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x0020d6fe0ac1564351eed062d6592a6255b2148ecda811aebaf64769a9a98092", + "header": 
"0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000066cf3d83210c97d1d20ae59929bf8ac222b5508cd9c37d2a2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x003558e5dac17899221e594eb6d2f2089ce984a674aa63d3815afbeb964588a7", "numTxs": 4 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_2.json b/l1-contracts/test/fixtures/mixed_block_2.json index 603ca2936a6..80906bf0265 100644 --- a/l1-contracts/test/fixtures/mixed_block_2.json +++ b/l1-contracts/test/fixtures/mixed_block_2.json @@ -58,7 +58,7 @@ ] }, "block": { - "archive": "0x04b567324d50f258764769f7c4d666346e730bbb64572dfb6d3a8a5fb8fe93e3", + "archive": "0x1926af497e574a918cf90e15520a8fdf4ff569f2d39fc4e72c7d23da4abbdb0a", "body": 
"0x00000004000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c000000000000000000000000000000000000000000000000000000000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f00000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000271000000000000000000000000000000000000000000000000000000000000027200000000000000000000000000000000000000000000000000000000000002730000000000000000000000000000000000000000000000000000000000000274000000000000000000000000000000000000000000000000000000000000027500000000000000000000
00000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e000000000000000000000000000000000000000000000000000000000000027f3f0000000000000000000000000000000000000000000000000000000000000340000000000000000000000000000000000000000000000000000000000000034100000000000000000000000000000000000000000000000000000000000003420000000000000000000000000000000000000000000000000000000000000343000000000000000000000000000000000000000000000000000000000000034400000000000000000000000000000000000000000000000000000000000003450000000000000000000000000000000000000000000000000000000000000346000000000000000000000000000000000000000000000000000000000000034700000000000000000000000000000000000000000000000000000000000003480000000000000000000000000000000000000000000000000000000000000349000000000000000000000000000000000000000000000000000000000000034a000000000000000000000000000000000000000000000000000000000000034b000000000000000000000000000000000000000000000000000000000000034c000000000000000000000000000000000000000000000000000000000000034d000000000000000000000000000000000000000000000000000000000000034e000000000000000000000000000000000000000000000000000000000000034f0000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000000000035100000000000000000000000000000000000000000000000000000000000003520000000000000000000000000000000000000000000000000000000000000353000000000000000000000000000000000000000000000000000000000000035400000000000000000000000000000000000000000000000000000000000003550000000000000000000000000000000000000000000000000000000000000356000000000000000000000000000000000000000000000000000000000000035700000000000000000000000000000000000000000000000000000000000003580000000000000000000000000000000000000000000000000000000000000359000000000000000000000000000000000000000000000000000000000000035a000000000000000000000000000000000000000000000000000000000000035b000000000000000000000000000000000000000000000000000000000000035c000000000000000000000000000000000000000000000000000000000000035d000000000000000000000000000000000000000000000000000000000000035e000000000000000000000000000000000000000000000000000000000000035f0000000000000000000000000000000000000000000000000000000000000360000000000000000000000000000000000000000000000000000000000000036100000000000000000000000000000000000000000000000000000000000003620000000000000000000000000000000000000000000000000000000000000363000000000000000000000000000000000000000000000000000000000000036400000000000000000000000000000000000000000000000000000000000003650000000000000000000000000000000000000000000000000000000000000366000000000000000000000000000000000000000000000000000000000000036700000000000000000000000000000000000000000000000000000000000003680000000000000000000000000000000000000000000000000000000000000369000000000000000000000000000000000000000000000000000000000000036a000000000000000000000000000000000000000000000000000000000000036b000000000000000000000000000000000000000000000000000000000000036c00000000000000000000000000000000000000000000000000000
0000000036d000000000000000000000000000000000000000000000000000000000000036e000000000000000000000000000000000000000000000000000000000000036f0000000000000000000000000000000000000000000000000000000000000370000000000000000000000000000000000000000000000000000000000000037100000000000000000000000000000000000000000000000000000000000003720000000000000000000000000000000000000000000000000000000000000373000000000000000000000000000000000000000000000000000000000000037400000000000000000000000000000000000000000000000000000000000003750000000000000000000000000000000000000000000000000000000000000376000000000000000000000000000000000000000000000000000000000000037700000000000000000000000000000000000000000000000000000000000003780000000000000000000000000000000000000000000000000000000000000379000000000000000000000000000000000000000000000000000000000000037a000000000000000000000000000000000000000000000000000000000000037b000000000000000000000000000000000000000000000000000000000000037c000000000000000000000000000000000000000000000000000000000000037d000000000000000000000000000000000000000000000000000000000000037e08003bbb1177505f3433bb062787fcac6585d30fa1a7e0b809ca5eef1cecc56cd0002d5b86280022d106b72e4425ea49f927ca2b8138fc13b3d9feeaf36ae61fb100adab9d66b73ae23a6e9127ebe0bcd963ef4312dd66878a6be131d39a7ee01c00d48f30dbb82c96c734c8abe33f4f9f75e6d0ba4462ee07d73c5335e04a02e900f31a3a4e7333ac6043a929fca12f5347e9e8bf1d67f5993860a774f10b77bc00230e3132bfa5df23e2e018b20cd4e0c75555825ee7924da73f017a279d39cd0095e2affd6b67c6ecbf77fa1e638139498e1642abb468c58ca8ce275260ea6100362e0941035fd171fab926caf55d18b22f7de99d60ac6f454386a9cce4a1ff3f0000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000641000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000642000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000064e0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006460000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000064700000000000000000000000000000000000000000000000000000000000006510000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000065200000000000000000000000000000000000000000000000000000000000006490000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000656000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000657000000000000000000000000000000000000000000000000000000000000064e000000000000000000000000000000000000000000000000000000000000065800000000000000000000
0000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000651000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000652000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006560000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000065700000000000000000000000000000000000000000000000000000000000006610000000000000000000000000000000000000000000000000000000000000658000000000000000000000000000000000000000000000000000000000000066200000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000666000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000667000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000000000661000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000662000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006660000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000066700000000000000000000000000000000000000000000000000000000000006710000000000000000000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000067200000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000
000000674000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000676000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000677000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000671000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000672000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000674000000000000000000000000000000000000000000000000000000000000067e0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000067f00000000000000000000000000000000000000000000000000000000000006760000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000067700000000000000000000000000000000000000000000000000000000000006810000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000068200000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000686000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000687000000000000000000000000000000000000000000000000000000000000067e000000000000000000000000000000000000000000000000000000000000068800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002810000000000000000000000000000000000000000000000000000000000000282000000000000000000000000000000000000000000000000000000000000028300000000000000000000000000000000000000000000000000000000000002840000000000000000000000000000000000000000000000000000000000000285000000000000000000000000000000000000000000000000000000000000028600000000000000000000000000000000000000000000000000000000000002870000000000000000000000000000000000000000000000000000000000000288000000000000000000000000000000000000000000000000000000000000028900000000000000000000000000000000000000000000000000000000000002
8a000000000000000000000000000000000000000000000000000000000000028b000000000000000000000000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be00000000000000000000000000000000000000000000000000000000000002bf3f000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000003810000000000000000000000000000000
0000000000000000000000000000003820000000000000000000000000000000000000000000000000000000000000383000000000000000000000000000000000000000000000000000000000000038400000000000000000000000000000000000000000000000000000000000003850000000000000000000000000000000000000000000000000000000000000386000000000000000000000000000000000000000000000000000000000000038700000000000000000000000000000000000000000000000000000000000003880000000000000000000000000000000000000000000000000000000000000389000000000000000000000000000000000000000000000000000000000000038a000000000000000000000000000000000000000000000000000000000000038b000000000000000000000000000000000000000000000000000000000000038c000000000000000000000000000000000000000000000000000000000000038d000000000000000000000000000000000000000000000000000000000000038e000000000000000000000000000000000000000000000000000000000000038f0000000000000000000000000000000000000000000000000000000000000390000000000000000000000000000000000000000000000000000000000000039100000000000000000000000000000000000000000000000000000000000003920000000000000000000000000000000000000000000000000000000000000393000000000000000000000000000000000000000000000000000000000000039400000000000000000000000000000000000000000000000000000000000003950000000000000000000000000000000000000000000000000000000000000396000000000000000000000000000000000000000000000000000000000000039700000000000000000000000000000000000000000000000000000000000003980000000000000000000000000000000000000000000000000000000000000399000000000000000000000000000000000000000000000000000000000000039a000000000000000000000000000000000000000000000000000000000000039b000000000000000000000000000000000000000000000000000000000000039c000000000000000000000000000000000000000000000000000000000000039d000000000000000000000000000000000000000000000000000000000000039e000000000000000000000000000000000000000000000000000000000000039f00000000000000000000000000000000000000000000000000000000000003a000000000000000000000000000000000000000000000000000000000000003a100000000000000000000000000000000000000000000000000000000000003a200000000000000000000000000000000000000000000000000000000000003a300000000000000000000000000000000000000000000000000000000000003a400000000000000000000000000000000000000000000000000000000000003a500000000000000000000000000000000000000000000000000000000000003a600000000000000000000000000000000000000000000000000000000000003a700000000000000000000000000000000000000000000000000000000000003a800000000000000000000000000000000000000000000000000000000000003a900000000000000000000000000000000000000000000000000000000000003aa00000000000000000000000000000000000000000000000000000000000003ab00000000000000000000000000000000000000000000000000000000000003ac00000000000000000000000000000000000000000000000000000000000003ad00000000000000000000000000000000000000000000000000000000000003ae00000000000000000000000000000000000000000000000000000000000003af00000000000000000000000000000000000000000000000000000000000003b000000000000000000000000000000000000000000000000000000000000003b100000000000000000000000000000000000000000000000000000000000003b200000000000000000000000000000000000000000000000000000000000003b300000000000000000000000000000000000000000000000000000000000003b400000000000000000000000000000000000000000000000000000000000003b500000000000000000000000000000000000000000000000000000000000003b600000000000000000000000000000000000000000000000000000000000003b700000000000000000000000000000000000000000000000000000000000003b800000000000000000000000000000000000000000000000000000000000003b900
000000000000000000000000000000000000000000000000000000000003ba00000000000000000000000000000000000000000000000000000000000003bb00000000000000000000000000000000000000000000000000000000000003bc00000000000000000000000000000000000000000000000000000000000003bd00000000000000000000000000000000000000000000000000000000000003be08004a32502b5d2a0cf5d776c92ef74de61a28e7882bdadeb3b0530f472704604300808c2412e2725ecaa6a6bd77e3159349e238dc7817f906ba32afd40b3cb3cb00d7d4e3b313c4cce9bd98c317ea715847a92d795b82a6f8b8144b7c61bee64200b76f07678c289f41378029b1353a73e1afbe241612a97c23e7fc059099f49d00c72046b39a9dc902cee36db5214c72315636bd824abfabdf80b1c5e6a01fd7006e0a74fec166a12f5a62954776784f01cba3be7ab876eef2e49a2ab7a5b0f900c83ddd8f6b91086bc83485adbe8056212b4d33b91840cd3dc649f9736881460022441e76225010acce7f429dc754fb3260ae1d387937978f69c67a879ed0733f0000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000068a0000000000000000000000000000000000000000000000000000000000000681000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000682000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006860000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000000000000000000000000000000000068700000000000000000000000000000000000000000000000000000000000006910000000000000000000000000000000000000000000000000000000000000688000000000000000000000000000000000000000000000000000000000000069200000000000000000000000000000000000000000000000000000000000006890000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000068a0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000696000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000697000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000698000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006990000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000000000000000000000000000000000069a0000000000000000000000000000000000000000000000000000000000000691000000000000000000000000000000000000000000000000000000000000069b0000000000000000000000000000000000000000000000000000000000000692000000000000000000000000000000000000000000000000000000000000069c0000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000069d0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000069e000000000000000000000000000000000
0000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000069f000000000000000000000000000000000000000000000000000000000000069600000000000000000000000000000000000000000000000000000000000006a0000000000000000000000000000000000000000000000000000000000000069700000000000000000000000000000000000000000000000000000000000006a1000000000000000000000000000000000000000000000000000000000000069800000000000000000000000000000000000000000000000000000000000006a2000000000000000000000000000000000000000000000000000000000000069900000000000000000000000000000000000000000000000000000000000006a3000000000000000000000000000000000000000000000000000000000000069a00000000000000000000000000000000000000000000000000000000000006a4000000000000000000000000000000000000000000000000000000000000069b00000000000000000000000000000000000000000000000000000000000006a5000000000000000000000000000000000000000000000000000000000000069c00000000000000000000000000000000000000000000000000000000000006a6000000000000000000000000000000000000000000000000000000000000069d00000000000000000000000000000000000000000000000000000000000006a7000000000000000000000000000000000000000000000000000000000000069e00000000000000000000000000000000000000000000000000000000000006a8000000000000000000000000000000000000000000000000000000000000069f00000000000000000000000000000000000000000000000000000000000006a900000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006a100000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006a200000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006a300000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006a400000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006a500000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006a600000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006a700000000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006a800000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006a900000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006b900000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006ba0000
0000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006bf00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006b900000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006ba00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006c80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000
000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa00000000000000000000000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe00000000000000000000000000000000000000000000000000000000000002ff3f00000000000000000000000000000000000000000000000000000000000003c000000000000000000000000000000000000000000000000000000000000003c100000000000000000000000000000000000000000000000000000000000003c200000000000000000000000000000000000000000000000000000000000003c300000000000000000000000000000000000000000000000000000000000003c400000000000000000000000000000000000000000000000000000000000003c500000000000000000000000000000000000000000000000000000000000003c600000000000000000000000000000000000000000000000000000000000003c700000000000000000000000000000000000000000000000000000000000003c800000000000000000000000000000000000000000000000000000000000003c900000000000000000000000000000000000000000000000000000000000003ca00000000000000000000000000000000000000000000000000000000000003cb00000000000000000000000000000000000000000000000000000000000003cc00000000000000000000000000000000000000000000000000000000000003cd00000000000000000000000000000000000000000000
000000000000000003ce00000000000000000000000000000000000000000000000000000000000003cf00000000000000000000000000000000000000000000000000000000000003d000000000000000000000000000000000000000000000000000000000000003d100000000000000000000000000000000000000000000000000000000000003d200000000000000000000000000000000000000000000000000000000000003d300000000000000000000000000000000000000000000000000000000000003d400000000000000000000000000000000000000000000000000000000000003d500000000000000000000000000000000000000000000000000000000000003d600000000000000000000000000000000000000000000000000000000000003d700000000000000000000000000000000000000000000000000000000000003d800000000000000000000000000000000000000000000000000000000000003d900000000000000000000000000000000000000000000000000000000000003da00000000000000000000000000000000000000000000000000000000000003db00000000000000000000000000000000000000000000000000000000000003dc00000000000000000000000000000000000000000000000000000000000003dd00000000000000000000000000000000000000000000000000000000000003de00000000000000000000000000000000000000000000000000000000000003df00000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000003e100000000000000000000000000000000000000000000000000000000000003e200000000000000000000000000000000000000000000000000000000000003e300000000000000000000000000000000000000000000000000000000000003e400000000000000000000000000000000000000000000000000000000000003e500000000000000000000000000000000000000000000000000000000000003e600000000000000000000000000000000000000000000000000000000000003e700000000000000000000000000000000000000000000000000000000000003e800000000000000000000000000000000000000000000000000000000000003e900000000000000000000000000000000000000000000000000000000000003ea00000000000000000000000000000000000000000000000000000000000003eb00000000000000000000000000000000000000000000000000000000000003ec00000000000000000000000000000000000000000000000000000000000003ed00000000000000000000000000000000000000000000000000000000000003ee00000000000000000000000000000000000000000000000000000000000003ef00000000000000000000000000000000000000000000000000000000000003f000000000000000000000000000000000000000000000000000000000000003f100000000000000000000000000000000000000000000000000000000000003f200000000000000000000000000000000000000000000000000000000000003f300000000000000000000000000000000000000000000000000000000000003f400000000000000000000000000000000000000000000000000000000000003f500000000000000000000000000000000000000000000000000000000000003f600000000000000000000000000000000000000000000000000000000000003f700000000000000000000000000000000000000000000000000000000000003f800000000000000000000000000000000000000000000000000000000000003f900000000000000000000000000000000000000000000000000000000000003fa00000000000000000000000000000000000000000000000000000000000003fb00000000000000000000000000000000000000000000000000000000000003fc00000000000000000000000000000000000000000000000000000000000003fd00000000000000000000000000000000000000000000000000000000000003fe0800bb07f923e24c36227d717557d58d32b99370b81f7a70d1835becc1114f7d7700624291eff6ab801e5668bc6619a3df132f8b392b063f09dfe8a69e38224eb200bd6b195f8716ab3dc93c44037cac579d25d0e77c2782b5aa62534ee25d36bc008c99d470d2c53624a8c5bedfeffdcc5356f6da89ee0e372b3ea3fa4f8cd652009944e00a3f9a7d2e8a535d3a210c3271d5732518037704d67ef5d42a8b82c200523014cb6eabe4366c6e599ba96534fc15ecc804a13fbaaacf8717e1b0a8d20005621252c4b36c113f21ad6c13b99e5cef514bdd98ef0aae4075b5cb5a000a00d80cb2a60aa
e6c3d2e7d27fb1519a708a18bb5b5085f78684bdee7512856a93f00000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006c800000000000000000000000000000000000000000000000000000000000006d200000000000000000000000000000000000000000000000000000000000006c900000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006d800000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006db00000000000000000000000000000000000000000000000000000000000006d200000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006d800000000000000000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006e40000000000000000000000000000000000000000000000
0000000000000006db00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006e400000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006f100000000000000000000000000000000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006f600000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006f700000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006f800000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006f900000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006fa00000000000000000000000000000000000000000000000000000000000006f100000000000000000000000000000000000000000000000000000000000006fb00000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006fc00000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006fd00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006fe00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ff00000000000000000000000000000000000000000000000000000000000006f6000000000000000000000000000000000000000000000000000000000000070000000000000000000
000000000000000000000000000000000000000000006f7000000000000000000000000000000000000000000000000000000000000070100000000000000000000000000000000000000000000000000000000000006f8000000000000000000000000000000000000000000000000000000000000070200000000000000000000000000000000000000000000000000000000000006f9000000000000000000000000000000000000000000000000000000000000070300000000000000000000000000000000000000000000000000000000000006fa000000000000000000000000000000000000000000000000000000000000070400000000000000000000000000000000000000000000000000000000000006fb000000000000000000000000000000000000000000000000000000000000070500000000000000000000000000000000000000000000000000000000000006fc000000000000000000000000000000000000000000000000000000000000070600000000000000000000000000000000000000000000000000000000000006fd000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000006fe0000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003210000000000000000000000000000000000000000000000000000000000000322000000000000000000000000
0000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e000000000000000000000000000000000000000000000000000000000000033f3f0000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000040100000000000000000000000000000000000000000000000000000000000004020000000000000000000000000000000000000000000000000000000000000403000000000000000000000000000000000000000000000000000000000000040400000000000000000000000000000000000000000000000000000000000004050000000000000000000000000000000000000000000000000000000000000406000000000000000000000000000000000000000000000000000000000000040700000000000000000000000000000000000000000000000000000000000004080000000000000000000000000000000000000000000000000000000000000409000000000000000000000000000000000000000000000000000000000000040a000000000000000000000000000000000000000000000000000000000000040b000000000000000000000000000000000000000000000000000000000000040c000000000000000000000000000000000000000000000000000000000000040d000000000000000000000000000000000000000000000000000000000000040e000000000000000000000000000000000000000000000000000000000000040f0000000000000000000000000000000000000000000000000000000000000410000000000000000000000000000000000000000000000000000000000000041100000000000000000000000000000000000000000000000000000000000004120000000000000000000000000000000000000000000000000000000000000413000000000000000000000000000000000000000000000000000000000000041400000000000000000000000000000000000000000000000000000000000004150000000000000000000000000000000000000000000000000000000000000416000000000000000000000000000000000000000000000000000000000000041700000000000000000000000000000000000000000000000000000000000004180000000000000000000000000000000000000000000000000000000000000419000000000000000000000000000000000000000000000000000000000
000041a000000000000000000000000000000000000000000000000000000000000041b000000000000000000000000000000000000000000000000000000000000041c000000000000000000000000000000000000000000000000000000000000041d000000000000000000000000000000000000000000000000000000000000041e000000000000000000000000000000000000000000000000000000000000041f0000000000000000000000000000000000000000000000000000000000000420000000000000000000000000000000000000000000000000000000000000042100000000000000000000000000000000000000000000000000000000000004220000000000000000000000000000000000000000000000000000000000000423000000000000000000000000000000000000000000000000000000000000042400000000000000000000000000000000000000000000000000000000000004250000000000000000000000000000000000000000000000000000000000000426000000000000000000000000000000000000000000000000000000000000042700000000000000000000000000000000000000000000000000000000000004280000000000000000000000000000000000000000000000000000000000000429000000000000000000000000000000000000000000000000000000000000042a000000000000000000000000000000000000000000000000000000000000042b000000000000000000000000000000000000000000000000000000000000042c000000000000000000000000000000000000000000000000000000000000042d000000000000000000000000000000000000000000000000000000000000042e000000000000000000000000000000000000000000000000000000000000042f0000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000000000043100000000000000000000000000000000000000000000000000000000000004320000000000000000000000000000000000000000000000000000000000000433000000000000000000000000000000000000000000000000000000000000043400000000000000000000000000000000000000000000000000000000000004350000000000000000000000000000000000000000000000000000000000000436000000000000000000000000000000000000000000000000000000000000043700000000000000000000000000000000000000000000000000000000000004380000000000000000000000000000000000000000000000000000000000000439000000000000000000000000000000000000000000000000000000000000043a000000000000000000000000000000000000000000000000000000000000043b000000000000000000000000000000000000000000000000000000000000043c000000000000000000000000000000000000000000000000000000000000043d000000000000000000000000000000000000000000000000000000000000043e0800a38df4d53fe5da3d48c97dd0f58e7a52faade40c8f50bc0f6408a2b5b3829800a3d4b994758c6630518cce3116391874b9b3b8a1bbcb36485d2702f05af359004c06e9dd14418667d7c66384699665f9222ed08be87c67293a2f1b6b4c41aa006b77b11703f0e3d21aa68c9f6d2ae8ad94ecafdb56be0c4605a0c6723e9ddc0049a087e215b640d8360f8dd98e07c77e4d2025a0dcdcaf78bc00bd6fde09680014741a52013a10622de90b1a7854f1203af5c97121a4b3774c249fba6e0dba00c783bd01c4a4cafe83fd3b4d1ce0c555be4b6d1f772cccccf86049f9af0bbe002ff3ac4d7e2f1caf1fb30e1d20eba5be2b6a42dd54690dee988a934026491e3f0000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000070a0000000000000000000000000000000000000000000000000000000000000701000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000702000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000703000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000704000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000705000000000000000000000000
000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007060000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000007110000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000071200000000000000000000000000000000000000000000000000000000000007090000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000070a0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000716000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000717000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000000000000000000000071a0000000000000000000000000000000000000000000000000000000000000711000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000712000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007160000000000000000000000000000000000000000000000000000000000000720000000000000000000000000000000000000000000000000000000000000071700000000000000000000000000000000000000000000000000000000000007210000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000072200000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000071a0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000726000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000727000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000000720000000000000000000000000000000000000000000000000000000000000072a00000000000000000000000000000000000000000000000000000000000
00721000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000722000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007260000000000000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000072700000000000000000000000000000000000000000000000000000000000007310000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000073200000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000072a0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000736000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000737000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007390000000000000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000731000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000732000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000073f00000000000000000000000000000000000000000000000000000000000007360000000000000000000000000000000000000000000000000000000000000740000000000000000000000000000000000000000000000000000000000000073700000000000000000000000000000000000000000000000000000000000007410000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000074200000000000000000000000000000000000000000000000000000000000007390000000000000000000000000000000000000000000000000000000000000743000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000744000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000745000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000746000000000000000000000000000000
000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000747000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000748000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "txsEffectsHash": "0x008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd646", "decodedHeader": { @@ -72,10 +72,10 @@ "blockNumber": 2, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000055", "chainId": 31337, - "timestamp": 1724320330, + "timestamp": 1724857947, "version": 1, - "coinbase": "0xa7cd83a4518a418b4dfbec14238d07ea9e6a0e58", - "feeRecipient": "0x25ce59a5810d314c43717d1b8e0bb9fb8eb574fb4250817e4125c65496cf12f1", + "coinbase": "0x210c97d1d20ae59929bf8ac222b5508cd9c37d2a", + "feeRecipient": "0x2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -83,7 +83,7 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x2e509ada109d80ef634c0eca74fecd28b17727847e3b21cf01961c73b1b58978" + "root": "0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a" }, "stateReference": { "l1ToL2MessageTree": { @@ -106,8 +106,8 @@ } } }, - "header": "0x2e509ada109d80ef634c0eca74fecd28b17727847e3b21cf01961c73b1b58978000000020000000000000000000000000000000000000000000000000000000000000004008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd64600212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb89193700d4b436f0c9857646ed7cf0836bd61c857183956d1acefe52fc99ef7b333a38224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad0000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000066c70a4aa7cd83a4518a418b4dfbec14238d07ea9e6a0e5825ce59a5810d314c43717d1b8e0bb9fb8eb574fb4250817e4125c65496cf12f1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x001c5bc0fe3f9edc312a5599452feb890df0d72a034179b49335c34055ba9c05", + "header": 
"0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a000000020000000000000000000000000000000000000000000000000000000000000004008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd64600212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb89193700d4b436f0c9857646ed7cf0836bd61c857183956d1acefe52fc99ef7b333a38224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad0000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000066cf3e5b210c97d1d20ae59929bf8ac222b5508cd9c37d2a2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x004a7a3146709849c48af3458b15956b28962ea0938fd6dd655fbb81f6e27222", "numTxs": 4 } } \ No newline at end of file diff --git a/l1-contracts/test/sparta/DevNet.t.sol b/l1-contracts/test/sparta/DevNet.t.sol index d37defe7ba8..4f043de9e84 100644 --- a/l1-contracts/test/sparta/DevNet.t.sol +++ b/l1-contracts/test/sparta/DevNet.t.sol @@ -166,7 +166,7 @@ contract DevNetTest is DecoderBase { // Why don't we end up here? vm.expectRevert( abi.encodeWithSelector( - Errors.Leonidas__InvalidProposer.selector, rollup.getValidatorAt(0), ree.proposer + Errors.DevNet__InvalidProposer.selector, rollup.getValidatorAt(0), ree.proposer ) ); ree.shouldRevert = true; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 020e5077b69..9ec67f80d94 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -5,12 +5,12 @@ import { AZTEC_SLOT_DURATION, ETHEREUM_SLOT_DURATION, EthAddress, + GENESIS_ARCHIVE_ROOT, type Header, - IS_DEV_NET, type Proof, } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; -import { type Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { serializeToBuffer } from '@aztec/foundation/serialize'; import { InterruptibleSleep } from '@aztec/foundation/sleep'; @@ -20,6 +20,7 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import pick from 'lodash.pick'; import { + ContractFunctionRevertedError, type GetContractReturnType, type Hex, type HttpTransport, @@ -102,6 +103,15 @@ export type MetadataForSlot = { archive: Buffer; }; +// @note If we want to simulate in the future, we have to skip the viem simulations and use `reads` instead +// This is because the viem simulations are not able to simulate the future, only the current state. +// This means that we will be simulating as if `block.timestamp` is the same for the next block +// as for the last block. 
+// Nevertheless, it can be quite useful for figuring out why exactly the transaction is failing +// as a middle ground right now, we will be skipping the simulation and just sending the transaction +// but only after we have done a successful run of the `validateHeader` for the timestamp in the future. +const SKIP_SIMULATION = true; + /** * Publishes L2 blocks to L1. This implementation does *not* retry a transaction in * the event of network congestion, but should work for local development. @@ -176,20 +186,46 @@ export class L1Publisher { return Promise.resolve(EthAddress.fromString(this.account.address)); } - public async willSimulationFail(slot: bigint): Promise { - // @note When simulating or estimating gas, `viem` will use the CURRENT state of the chain - // and not the state in the next block. Meaning that the timestamp will be the same as - // the previous block, which means that the slot will also be the same. - // This effectively means that if we try to simulate for the first L1 block where we - // will be proposer, we will have a failure as the slot have not yet changed. - // @todo #8110 + public async canProposeAtNextEthBlock(archive: Buffer): Promise<[bigint, bigint]> { + const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); + const [slot, blockNumber] = await this.rollupContract.read.canProposeAtTime([ + ts, + this.account.address, + `0x${archive.toString('hex')}`, + ]); + return [slot, blockNumber]; + } - if (IS_DEV_NET) { - return false; - } + public async validateBlockForSubmission( + header: Header, + digest: Buffer = new Fr(GENESIS_ARCHIVE_ROOT).toBuffer(), + attestations: Signature[] = [], + ): Promise { + const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); - const currentSlot = BigInt(await this.rollupContract.read.getCurrentSlot()); - return currentSlot != slot; + const formattedAttestations = attestations.map(attest => attest.toViemSignature()); + const flags = { ignoreDA: true, ignoreSignatures: attestations.length == 0 }; + + const args = [ + `0x${header.toBuffer().toString('hex')}`, + formattedAttestations, + `0x${digest.toString('hex')}`, + ts, + flags, + ] as const; + + try { + await this.rollupContract.read.validateHeader(args, { account: this.account }); + } catch (error: unknown) { + // Specify the type of error + if (error instanceof ContractFunctionRevertedError) { + const err = error as ContractFunctionRevertedError; + this.log.debug(`Validation failed: ${err.message}`, err.data); + } else { + this.log.debug(`Unexpected error during validation: ${error}`); + } + throw error; // Re-throw the error if needed + } } // @note Assumes that all ethereum slots have blocks @@ -247,11 +283,11 @@ export class L1Publisher { blockHash: block.hash().toString(), }; - if (await this.willSimulationFail(block.header.globalVariables.slotNumber.toBigInt())) { - // @note See comment in willSimulationFail for more information - this.log.info(`Simulation will fail for slot ${block.header.globalVariables.slotNumber.toBigInt()}`); - return false; - } + // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available + // This means that we can avoid the simulation issues in later checks. + // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which + // make time consistency checks break. 
+ await this.validateBlockForSubmission(block.header, block.archive.root.toBuffer(), attestations); const processTxArgs = { header: block.header.toBuffer(), @@ -436,9 +472,11 @@ export class L1Publisher { `0x${proof.toString('hex')}`, ] as const; - await this.rollupContract.simulate.submitBlockRootProof(args, { - account: this.account, - }); + if (!SKIP_SIMULATION) { + await this.rollupContract.simulate.submitBlockRootProof(args, { + account: this.account, + }); + } return await this.rollupContract.write.submitBlockRootProof(args, { account: this.account, @@ -449,6 +487,7 @@ export class L1Publisher { } } + // This is used in `integration_l1_publisher.test.ts` currently. Could be removed though. private async sendPublishTx(encodedBody: Buffer): Promise { if (!this.interrupted) { try { @@ -481,7 +520,9 @@ export class L1Publisher { attestations, ] as const; - await this.rollupContract.simulate.process(args, { account: this.account }); + if (!SKIP_SIMULATION) { + await this.rollupContract.simulate.process(args, { account: this.account }); + } return await this.rollupContract.write.process(args, { account: this.account, @@ -493,8 +534,9 @@ export class L1Publisher { `0x${encodedData.blockHash.toString('hex')}`, ] as const; - await this.rollupContract.simulate.process(args, { account: this.account }); - + if (!SKIP_SIMULATION) { + await this.rollupContract.simulate.process(args, { account: this.account }); + } return await this.rollupContract.write.process(args, { account: this.account, }); @@ -519,10 +561,11 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - // Using simulate to get a meaningful error message - await this.rollupContract.simulate.publishAndProcess(args, { - account: this.account, - }); + if (!SKIP_SIMULATION) { + await this.rollupContract.simulate.publishAndProcess(args, { + account: this.account, + }); + } return await this.rollupContract.write.publishAndProcess(args, { account: this.account, @@ -535,9 +578,11 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - await this.rollupContract.simulate.publishAndProcess(args, { - account: this.account, - }); + if (!SKIP_SIMULATION) { + await this.rollupContract.simulate.publishAndProcess(args, { + account: this.account, + }); + } return await this.rollupContract.write.publishAndProcess(args, { account: this.account, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index aec543dfef4..122df289b8d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -10,7 +10,16 @@ import { } from '@aztec/circuit-types'; import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; -import { AztecAddress, EthAddress, type GlobalVariables, type Header, IS_DEV_NET } from '@aztec/circuits.js'; +import { + AppendOnlyTreeSnapshot, + AztecAddress, + ContentCommitment, + EthAddress, + GENESIS_ARCHIVE_ROOT, + Header, + IS_DEV_NET, + StateReference, +} from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; @@ -21,6 +30,8 @@ import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec import { type ValidatorClient } from '@aztec/validator-client'; import { type 
WorldStateStatus, type WorldStateSynchronizer } from '@aztec/world-state'; +import { BaseError, ContractFunctionRevertedError } from 'viem'; + import { type BlockBuilderFactory } from '../block_builder/index.js'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; @@ -197,11 +208,13 @@ export class Sequencer { ? await this.l2BlockSource.getBlockNumber() : Number(historicalHeader.globalVariables.blockNumber.toBigInt())) + 1; - const chainTipArchive = chainTip?.archive.root.toBuffer(); + // If we cannot find a tip archive, assume genesis. + const chainTipArchive = + chainTip == undefined ? new Fr(GENESIS_ARCHIVE_ROOT).toBuffer() : chainTip?.archive.root.toBuffer(); let slot: bigint; try { - slot = await this.canProposeBlock(historicalHeader, undefined, chainTipArchive); + slot = await this.mayProposeBlock(chainTipArchive, BigInt(newBlockNumber)); } catch (err) { this.log.debug(`Cannot propose for block ${newBlockNumber}`); return; @@ -228,6 +241,15 @@ export class Sequencer { slot, ); + // If I created a "partial" header here that should make our job much easier. + const proposalHeader = new Header( + new AppendOnlyTreeSnapshot(Fr.fromBuffer(chainTipArchive), 1), + ContentCommitment.empty(), + StateReference.empty(), + newGlobalVariables, + Fr.ZERO, + ); + // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here const allValidTxs = await this.takeValidTxs( pendingTxs, @@ -246,7 +268,7 @@ export class Sequencer { return; } - await this.buildBlockAndPublish(validTxs, newGlobalVariables, historicalHeader, chainTipArchive); + await this.buildBlockAndPublish(validTxs, proposalHeader, historicalHeader); } catch (err) { if (BlockProofError.isBlockProofError(err)) { const txHashes = err.txHashes.filter(h => !h.isZero()); @@ -267,109 +289,28 @@ export class Sequencer { return this.maxSecondsBetweenBlocks > 0 && elapsed >= this.maxSecondsBetweenBlocks; } - async canProposeBlock( - historicalHeader?: Header, - globalVariables?: GlobalVariables, - tipArchive?: Buffer, - ): Promise { - // @note In order to be able to propose a block, a few conditions must be met: - // - We must be caught up to the pending chain - // - The tip archive must match the one from the L1 - // - The block number should match the one from the L1 - // - If we have built a block, the block number must match the next pending block - // - There cannot already be a block for the slot - // - If we are NOT in devnet, then the active slot must match the slot number of the block - // - The proposer must either be free for all or specifically us. - // - // Note that the ordering of these checks are NOT optimised for performance, but to resemble the ordering - // that is used in the Rollup contract: - // - _validateHeaderForSubmissionBase - // - _validateHeaderForSubmissionSequencerSelection - // - // Also, we are logging debug messages for checks that fail to make it easier to debug, as we are usually - // catching the errors. 
- - const { proposer, slot, pendingBlockNumber, archive } = await this.publisher.getMetadataForSlotAtNextEthBlock(); - - if (await this.publisher.willSimulationFail(slot)) { - // @note See comment in willSimulationFail for more information - const msg = `Simulation will fail for slot ${slot}`; - this.log.debug(msg); - throw new Error(msg); - } - - // If our tip of the chain is different from the tip on L1, we should not propose a block - // @note This will change along with the data publication changes. - if (tipArchive && !archive.equals(tipArchive)) { - const msg = `Tip archive does not match the one from the L1`; - this.log.debug(msg); - throw new Error(msg); - } - - // Make sure I'm caught up to the pending chain - if ( - pendingBlockNumber > 0 && - (historicalHeader == undefined || historicalHeader.globalVariables.blockNumber.toBigInt() != pendingBlockNumber) - ) { - const msg = `Not caught up to pending block ${pendingBlockNumber}`; - this.log.debug(msg); - throw new Error(msg); - } - - // If I have constructed a block, make sure that the block number matches the next pending block number - if (globalVariables) { - if (globalVariables.blockNumber.toBigInt() !== pendingBlockNumber + 1n) { - const msg = `Block number mismatch. Expected ${ - pendingBlockNumber + 1n - } but got ${globalVariables.blockNumber.toBigInt()}`; - this.log.debug(msg); - throw new Error(msg); - } + async mayProposeBlock(tipArchive: Buffer, proposalBlockNumber: bigint): Promise { + // This checks that we can propose, and gives us the slot that we are to propose for + try { + const [slot, blockNumber] = await this.publisher.canProposeAtNextEthBlock(tipArchive); - const currentBlockNumber = await this.l2BlockSource.getBlockNumber(); - if (currentBlockNumber + 1 !== globalVariables.blockNumber.toNumber()) { - this.metrics.recordCancelledBlock(); - const msg = 'New block was emitted while building block'; + if (proposalBlockNumber !== blockNumber) { + const msg = `Block number mismatch. Expected ${proposalBlockNumber} but got ${blockNumber}`; this.log.debug(msg); throw new Error(msg); } - } - // Do not go forward if there was already a block for the slot - if (historicalHeader && historicalHeader.globalVariables.slotNumber.toBigInt() === slot) { - const msg = `Block already exists for slot ${slot}`; - this.log.debug(msg); - throw new Error(msg); - } - - // Related to _validateHeaderForSubmissionSequencerSelection - - if (IS_DEV_NET) { - // If we are in devnet, make sure that we are a validator - if ((await this.publisher.getValidatorCount()) != 0n && !(await this.publisher.amIAValidator())) { - const msg = 'Not a validator in devnet'; - this.log.debug(msg); - throw new Error(msg); - } - } else { - // If I have a constructed a block, make sure that the slot matches the current slot number - if (globalVariables) { - if (slot !== globalVariables.slotNumber.toBigInt()) { - const msg = `Slot number mismatch. Expected ${slot} but got ${globalVariables.slotNumber.toBigInt()}`; - this.log.debug(msg); - throw new Error(msg); + return slot; + } catch (err) { + if (err instanceof BaseError) { + const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); + if (revertError instanceof ContractFunctionRevertedError) { + const errorName = revertError.data?.errorName ?? 
''; + this.log.debug(`canProposeAtTime failed with "${errorName}"`); } } - - // Do not go forward with new block if not free for all or my turn - if (!proposer.isZero() && !proposer.equals(await this.publisher.getSenderAddress())) { - const msg = 'Not my turn to submit block'; - this.log.debug(msg); - throw new Error(msg); - } + throw err; } - - return slot; } shouldProposeBlock(historicalHeader: Header | undefined, args: ShouldProposeArgs): boolean { @@ -440,18 +381,16 @@ export class Sequencer { return true; } - @trackSpan( - 'Sequencer.buildBlockAndPublish', - (_validTxs, newGlobalVariables, _historicalHeader, _chainTipArchive) => ({ - [Attributes.BLOCK_NUMBER]: newGlobalVariables.blockNumber.toNumber(), - }), - ) + @trackSpan('Sequencer.buildBlockAndPublish', (_validTxs, proposalHeader, _historicalHeader) => ({ + [Attributes.BLOCK_NUMBER]: proposalHeader.globalVariables.blockNumber.toNumber(), + })) private async buildBlockAndPublish( validTxs: Tx[], - newGlobalVariables: GlobalVariables, + proposalHeader: Header, historicalHeader: Header | undefined, - chainTipArchive: Buffer | undefined, ): Promise { + const newGlobalVariables = proposalHeader.globalVariables; + this.metrics.recordNewBlock(newGlobalVariables.blockNumber.toNumber(), validTxs.length); const workTimer = new Timer(); this.state = SequencerState.CREATING_BLOCK; @@ -483,7 +422,7 @@ export class Sequencer { await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); } - await this.canProposeBlock(historicalHeader, newGlobalVariables, chainTipArchive); + await this.publisher.validateBlockForSubmission(proposalHeader); if ( !this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length, @@ -508,7 +447,7 @@ export class Sequencer { // Block is ready, now finalise const { block } = await blockBuilder.finaliseBlock(); - await this.canProposeBlock(historicalHeader, newGlobalVariables, chainTipArchive); + await this.publisher.validateBlockForSubmission(block.header); const workDuration = workTimer.ms(); this.log.verbose( @@ -530,7 +469,6 @@ export class Sequencer { this.isFlushing = false; const attestations = await this.collectAttestations(block); - await this.canProposeBlock(historicalHeader, newGlobalVariables, chainTipArchive); try { await this.publishL2Block(block, attestations); @@ -565,6 +503,7 @@ export class Sequencer { // ; ; // / \ // _____________/_ __ \_____________ + if (IS_DEV_NET || !this.validatorClient) { return undefined; } From da2eb7a08d4ce7920fc3b55dd56328b13aacb7eb Mon Sep 17 00:00:00 2001 From: LHerskind Date: Wed, 28 Aug 2024 15:35:09 +0000 Subject: [PATCH 002/123] refactor: get rid of timetraveler from l1-publisher --- l1-contracts/test/fixtures/empty_block_1.json | 12 +- l1-contracts/test/fixtures/empty_block_2.json | 14 +-- l1-contracts/test/fixtures/mixed_block_1.json | 12 +- l1-contracts/test/fixtures/mixed_block_2.json | 14 +-- yarn-project/end-to-end/package.json | 10 +- yarn-project/end-to-end/package.local.json | 2 +- .../composed/integration_l1_publisher.test.ts | 1 - yarn-project/end-to-end/src/fixtures/index.ts | 1 + .../src/fixtures/snapshot_manager.ts | 19 +++ yarn-project/end-to-end/src/fixtures/utils.ts | 9 ++ .../end-to-end/src/fixtures/watcher.ts | 64 ++++++++++ yarn-project/foundation/src/config/env_var.ts | 1 - .../sequencer-client/src/publisher/config.ts | 12 +- .../src/publisher/l1-publisher.test.ts | 33 ++++- .../src/publisher/l1-publisher.ts | 115 +++++------------- .../src/sequencer/sequencer.test.ts | 53 ++++---- .../src/sequencer/sequencer.ts | 10 +- 17 files 
changed, 218 insertions(+), 164 deletions(-) create mode 100644 yarn-project/end-to-end/src/fixtures/watcher.ts diff --git a/l1-contracts/test/fixtures/empty_block_1.json b/l1-contracts/test/fixtures/empty_block_1.json index 4f277922e9d..07f8afdcbe3 100644 --- a/l1-contracts/test/fixtures/empty_block_1.json +++ b/l1-contracts/test/fixtures/empty_block_1.json @@ -8,7 +8,7 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c5", + "archive": "0x0f24dbb7e2a507326574582c3f44c08266eb441e926f2d68ca112f358585669f", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ -22,10 +22,10 @@ "blockNumber": 1, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000033", "chainId": 31337, - "timestamp": 1724858655, + "timestamp": 1724861610, "version": 1, - "coinbase": "0xf6ed6ac83b91e8a719e552711e90249ae6fb95e0", - "feeRecipient": "0x28e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0", + "coinbase": "0x872bd7c2a38898f9fc254b86f0fd95475f7eec20", + "feeRecipient": "0x2d78818b03bcaf7034fca9d658f1212c6b2aecd6839e03cc21e2846fc061202d", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -56,8 +56,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000066cf411ff6ed6ac83b91e8a719e552711e90249ae6fb95e028e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00bc2933646ffc5ac63d957ffb345230d5fc0bfc14359cd13f3a77b60c5d1e1a", + "header": 
"0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000066cf4caa872bd7c2a38898f9fc254b86f0fd95475f7eec202d78818b03bcaf7034fca9d658f1212c6b2aecd6839e03cc21e2846fc061202d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x00a46e722d885c3bf479f8cf2a786adfde7a8c43740214163f7f9846914cbe6f", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/empty_block_2.json b/l1-contracts/test/fixtures/empty_block_2.json index ecbf0057f01..76c3b241250 100644 --- a/l1-contracts/test/fixtures/empty_block_2.json +++ b/l1-contracts/test/fixtures/empty_block_2.json @@ -8,7 +8,7 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x0d67087b909c777b9b4c429d421047608d3b5113d6525f024403ba72f0e8c039", + "archive": "0x25126f40ad58d24006e6db629c2efb1e0868a50d5c21aa342894137b2b08bc0b", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ -22,10 +22,10 @@ "blockNumber": 2, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000035", "chainId": 31337, - "timestamp": 1724858679, + "timestamp": 1724861634, "version": 1, - "coinbase": "0xf6ed6ac83b91e8a719e552711e90249ae6fb95e0", - "feeRecipient": "0x28e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0", + "coinbase": "0x872bd7c2a38898f9fc254b86f0fd95475f7eec20", + "feeRecipient": "0x2d78818b03bcaf7034fca9d658f1212c6b2aecd6839e03cc21e2846fc061202d", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -33,7 +33,7 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c5" + "root": "0x0f24dbb7e2a507326574582c3f44c08266eb441e926f2d68ca112f358585669f" }, "stateReference": { "l1ToL2MessageTree": { @@ -56,8 +56,8 @@ } } }, - "header": 
"0x2ee6f8720f80fcc063c1042327b734823d51455e444f72fafc9af975f18ab9c500000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000066cf4137f6ed6ac83b91e8a719e552711e90249ae6fb95e028e8cab987a2f3b89fcb16878a71c2076c2b73a6fb26505d8d8db8cf80bb82f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00d1b60b5c9c83f637568ce4355e0127dc27d65edd9f632ab7fb3b00c9fd2199", + "header": "0x0f24dbb7e2a507326574582c3f44c08266eb441e926f2d68ca112f358585669f00000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000066cf4cc2872bd7c2a38898f9fc254b86f0fd95475f7eec202d78818b03bcaf7034fca9d658f1212c6b2aecd6839e03cc21e2846fc061202d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x00de3870b035eca1a73e8adf24b738db855a034548805b300d8dbecfd2a6e66b", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_1.json b/l1-contracts/test/fixtures/mixed_block_1.json index a74f4b6d39a..b38d3363fc4 100644 --- a/l1-contracts/test/fixtures/mixed_block_1.json +++ b/l1-contracts/test/fixtures/mixed_block_1.json @@ -58,7 +58,7 @@ ] }, "block": { - "archive": "0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a", + "archive": "0x1c4782ca647f1ed559bce9f625bfae6a561ce96481c49d9b31ce4df8f46124b1", "body": 
"0x00000004000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000001420000000000000000000000000000000000000000000000000000000000000143000000000000000000000000000000000000000000000000000000000000014400000000000000000000000000000000000000000000000000000000000001450000000000000000000000000000000000000000000000000000000000000146000000000000000000000000000000000000000000000000000000000000014700000000000000000000000000000000000000000000000000000000000001480000000000000000000000000000000000000000000000000000000000000149000000000000000000000000000000000000000000000000000000000000014a000000000000000000000000000000000000000000000000000000000000014b000000000000000000000000000000000000000000000000000000000000014c000000000000000000000000000000000000000000000000000000000000014d000000000000000000000000000000000000000000000000000000000000014e000000000000000000000000000000000000000000000000000000000000014f0000000000000000000000000000000000000000000000000000000000000150000000000000000000000000000000000000000000000000000000000000015100000000000000000000000000000000000000000000000000000000000001520000000000000000000000000000000000000000000000000000000000000153000000000000000000000000000000000000000000000000000000000000015400000000000000000000000000000000000000000000000000000000000001550000000000000000000000000000000000000000000000000000000000000156000000000000000000000000000000000000000000000000000000000000015700000000000000000000000000000000000000000000000000000000000001580000000000000000000000000000000000000000000000000000000000000159000000000000000000000000000000000000000000000000000000000000015a000000000000000000000000000000000000000000000000000000000000015b000000000000000000000000000000000000000000000000000000000000015c000000000000000000000000000000000000000000000000000000000000015d000000000000000000000000000000000000000000000000000000000000015e000000000000000000000000000000000000000000000000000000000000015f0000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000016100000000000000000000000000000000000000000000000000000000000001620000000000000000000000000000000000000000000000000000000000000163000000000000000000000000000000000000000000000000000000000000016400000000000000000000000000000000000000000000000000000000000001650000000000000000000000000000000000000000000000000000000000000166000000000000000000000000000000000000000000000000000000000000016700000000000000000000000000000000000000000000000000000000000001680000000000000000000000000000000000000000000000000000000000000169000000000000000000000000000000000000000000000000000000000000016a000000000000000000000000000000000000000000000000000000000000016b000000000000000000000000000000000000000000000000000000000000016c000000000000000000000000000000000000000000000000000000000000016d000000000000000000000000000000000000000000000000000000000000016e000000000000000000000000000000000000000000000000000000000000016f00000000000000000000000000000000000000000000000000000000000001700000000000000000000000000000000000000000000000000000000000000171000000000000000000000000000000000000000000000000000000000000017200000000000000000000000000000000000000000000000000000000000001730000000000000000000000000000000000000000000000000000000000000174000000000000000000000000000000000000000000000000000000000000017500000000000000000000
00000000000000000000000000000000000000000176000000000000000000000000000000000000000000000000000000000000017700000000000000000000000000000000000000000000000000000000000001780000000000000000000000000000000000000000000000000000000000000179000000000000000000000000000000000000000000000000000000000000017a000000000000000000000000000000000000000000000000000000000000017b000000000000000000000000000000000000000000000000000000000000017c000000000000000000000000000000000000000000000000000000000000017d000000000000000000000000000000000000000000000000000000000000017e000000000000000000000000000000000000000000000000000000000000017f3f0000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c00000000000000000000000000000000000000000000000000000
0000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f0000000000000000000000000000000000000000000000000000000000000270000000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000002720000000000000000000000000000000000000000000000000000000000000273000000000000000000000000000000000000000000000000000000000000027400000000000000000000000000000000000000000000000000000000000002750000000000000000000000000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e0800c47667396742a5474f325e2567bff3bb99b7f0bfd2b1a689b635d8b8726cce00284120278895e8d47084ae759f390e9634881e41369a257a36fe99a2369dc800a328b4a4a6ed156325253b4ab2af7ca00e21caf7963f0fba8a88ccdc3512c300705c99df420bface231f6855799db1d0ed7d39419abc47aa8c6efe2ae7aae2009299cc308de6d23788384411e024791a5b2448e455fbdd1d1f28f3ff76631f002d6c03d81ad764de51b0f34584645191cdc2aaae2ca08fb838d142b95d62f5003032f3618b2df0fa335d5fd548d6d85e42b4e7eb5fff9eb687facbbdecb8a60016cab7ddf4d1b440d53d10284c5c82a78b2d4e27dcdb44ef434ef4c6bad6783f0000000000000000000000000000000000000000000000000000000000000540000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000541000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000542000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000543000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000544000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000545000000000000000000000000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005460000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000054700000000000000000000000000000000000000000000000000000000000005510000000000000000000000000000000000000000000000000000000000000548000000000000000000000000000000000000000000000000000000000000055200000000000000000000000000000000000000000000000000000000000005490000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000556000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000557000000000000000000000000000000000000000000000000000000000000054e000000000000000000000000000000000000000000000000000000000000055800000000000000000000
0000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000551000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000552000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005560000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000055700000000000000000000000000000000000000000000000000000000000005610000000000000000000000000000000000000000000000000000000000000558000000000000000000000000000000000000000000000000000000000000056200000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000566000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000567000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000561000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000562000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005660000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000056700000000000000000000000000000000000000000000000000000000000005710000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000057200000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000
000000574000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000576000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000577000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000571000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000572000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000057f00000000000000000000000000000000000000000000000000000000000005760000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000057700000000000000000000000000000000000000000000000000000000000005810000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000058200000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000586000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000587000000000000000000000000000000000000000000000000000000000000057e000000000000000000000000000000000000000000000000000000000000058800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000001810000000000000000000000000000000000000000000000000000000000000182000000000000000000000000000000000000000000000000000000000000018300000000000000000000000000000000000000000000000000000000000001840000000000000000000000000000000000000000000000000000000000000185000000000000000000000000000000000000000000000000000000000000018600000000000000000000000000000000000000000000000000000000000001870000000000000000000000000000000000000000000000000000000000000188000000000000000000000000000000000000000000000000000000000000018900000000000000000000000000000000000000000000000000000000000001
8a000000000000000000000000000000000000000000000000000000000000018b000000000000000000000000000000000000000000000000000000000000018c000000000000000000000000000000000000000000000000000000000000018d000000000000000000000000000000000000000000000000000000000000018e000000000000000000000000000000000000000000000000000000000000018f0000000000000000000000000000000000000000000000000000000000000190000000000000000000000000000000000000000000000000000000000000019100000000000000000000000000000000000000000000000000000000000001920000000000000000000000000000000000000000000000000000000000000193000000000000000000000000000000000000000000000000000000000000019400000000000000000000000000000000000000000000000000000000000001950000000000000000000000000000000000000000000000000000000000000196000000000000000000000000000000000000000000000000000000000000019700000000000000000000000000000000000000000000000000000000000001980000000000000000000000000000000000000000000000000000000000000199000000000000000000000000000000000000000000000000000000000000019a000000000000000000000000000000000000000000000000000000000000019b000000000000000000000000000000000000000000000000000000000000019c000000000000000000000000000000000000000000000000000000000000019d000000000000000000000000000000000000000000000000000000000000019e000000000000000000000000000000000000000000000000000000000000019f00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000000000000000000000000000000001a100000000000000000000000000000000000000000000000000000000000001a200000000000000000000000000000000000000000000000000000000000001a300000000000000000000000000000000000000000000000000000000000001a400000000000000000000000000000000000000000000000000000000000001a500000000000000000000000000000000000000000000000000000000000001a600000000000000000000000000000000000000000000000000000000000001a700000000000000000000000000000000000000000000000000000000000001a800000000000000000000000000000000000000000000000000000000000001a900000000000000000000000000000000000000000000000000000000000001aa00000000000000000000000000000000000000000000000000000000000001ab00000000000000000000000000000000000000000000000000000000000001ac00000000000000000000000000000000000000000000000000000000000001ad00000000000000000000000000000000000000000000000000000000000001ae00000000000000000000000000000000000000000000000000000000000001af00000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b100000000000000000000000000000000000000000000000000000000000001b200000000000000000000000000000000000000000000000000000000000001b300000000000000000000000000000000000000000000000000000000000001b400000000000000000000000000000000000000000000000000000000000001b500000000000000000000000000000000000000000000000000000000000001b600000000000000000000000000000000000000000000000000000000000001b700000000000000000000000000000000000000000000000000000000000001b800000000000000000000000000000000000000000000000000000000000001b900000000000000000000000000000000000000000000000000000000000001ba00000000000000000000000000000000000000000000000000000000000001bb00000000000000000000000000000000000000000000000000000000000001bc00000000000000000000000000000000000000000000000000000000000001bd00000000000000000000000000000000000000000000000000000000000001be00000000000000000000000000000000000000000000000000000000000001bf3f000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002810000000000000000000000000000000
0000000000000000000000000000002820000000000000000000000000000000000000000000000000000000000000283000000000000000000000000000000000000000000000000000000000000028400000000000000000000000000000000000000000000000000000000000002850000000000000000000000000000000000000000000000000000000000000286000000000000000000000000000000000000000000000000000000000000028700000000000000000000000000000000000000000000000000000000000002880000000000000000000000000000000000000000000000000000000000000289000000000000000000000000000000000000000000000000000000000000028a000000000000000000000000000000000000000000000000000000000000028b000000000000000000000000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900
000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be0800789ff73d7787206612d96dfc2143f2344de21669a3f8cae7fe9a8918631eb00084a17f00bf8793b6851a106e9155543125e0be987ad3c8334456bdda171d0b00a400f8fd336ad84f467465964008238fd1b7f9c51c22912d706cd2b874d24e002c79bdd83c14ff50a46964f838ee207564909e28af79a57fc195810d36f9b20070083c6ef1e4dd88a064e94d2582283b203cf8a2ab1667f4370eda1b4c1fe8005373dffb5b590053d7762efcf9e11280f1486ce82e7996d94ee0f5d7c093bc009eefd90eb40e79c78bac1f71ec78bdc2f8b30041974239bdc765edffed813800ea95742e72792ca7a0f66ce9f55bc47dc09d5ea08c1b9018763102776978303f0000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000581000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000582000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005860000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000058700000000000000000000000000000000000000000000000000000000000005910000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000059200000000000000000000000000000000000000000000000000000000000005890000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000596000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000597000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000598000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005990000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000059a0000000000000000000000000000000000000000000000000000000000000591000000000000000000000000000000000000000000000000000000000000059b0000000000000000000000000000000000000000000000000000000000000592000000000000000000000000000000000000000000000000000000000000059c0000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000059d0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000059e000000000000000000000000000000000
0000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000059f000000000000000000000000000000000000000000000000000000000000059600000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000059700000000000000000000000000000000000000000000000000000000000005a1000000000000000000000000000000000000000000000000000000000000059800000000000000000000000000000000000000000000000000000000000005a2000000000000000000000000000000000000000000000000000000000000059900000000000000000000000000000000000000000000000000000000000005a3000000000000000000000000000000000000000000000000000000000000059a00000000000000000000000000000000000000000000000000000000000005a4000000000000000000000000000000000000000000000000000000000000059b00000000000000000000000000000000000000000000000000000000000005a5000000000000000000000000000000000000000000000000000000000000059c00000000000000000000000000000000000000000000000000000000000005a6000000000000000000000000000000000000000000000000000000000000059d00000000000000000000000000000000000000000000000000000000000005a7000000000000000000000000000000000000000000000000000000000000059e00000000000000000000000000000000000000000000000000000000000005a8000000000000000000000000000000000000000000000000000000000000059f00000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005a100000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005a200000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005a300000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005a400000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005a500000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005a600000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005a700000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005a800000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005ba0000
0000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005bf00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005ba00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005c80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000001c100000000000000000000000000000000000000000000000000000000000001c200000000000000000000000000000000000000000000000000000000000001c300000000000000000000000000000000000000000000000000000000000001c400000000000000000000000000000000000000000000000000000000000001c500000000000000000000000000000000000000000000000000000000000001c600000000000000000000000000000000000000000000000000000000000001c700000000000000000000000000000000000000000000000000000000000001c800000000000000000000000000000000000000000000000000000000000001c900000000000000000000000000000000000000000000000000000000000001ca00000000000000000000000000000000000000000000000000000000000001cb00000000000000000000000000000000000000000000000000000000000001cc00000000000000000000000000000000000000000000000000000000000001cd00000000000000000000000000000000000000000000000000000000000001ce00000000000000000000000000000000000000000000000000000000000001cf00000000000000000000000000000000000000000000000000000000000001d000000000000000000000000000000000000000000000000000000000000001d100000000000000000000000000000000000000000000000000000000000001d200000000000000000000000000000000000000000000000000000000000001d300000000000000000000000000000000000000000000000000000000000001d400000000000000000000000000000000000000000000000000000000000001d500000000000000000000000000000000000000000000000000000000000001d600000000000
000000000000000000000000000000000000000000000000001d700000000000000000000000000000000000000000000000000000000000001d800000000000000000000000000000000000000000000000000000000000001d900000000000000000000000000000000000000000000000000000000000001da00000000000000000000000000000000000000000000000000000000000001db00000000000000000000000000000000000000000000000000000000000001dc00000000000000000000000000000000000000000000000000000000000001dd00000000000000000000000000000000000000000000000000000000000001de00000000000000000000000000000000000000000000000000000000000001df00000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000001e100000000000000000000000000000000000000000000000000000000000001e200000000000000000000000000000000000000000000000000000000000001e300000000000000000000000000000000000000000000000000000000000001e400000000000000000000000000000000000000000000000000000000000001e500000000000000000000000000000000000000000000000000000000000001e600000000000000000000000000000000000000000000000000000000000001e700000000000000000000000000000000000000000000000000000000000001e800000000000000000000000000000000000000000000000000000000000001e900000000000000000000000000000000000000000000000000000000000001ea00000000000000000000000000000000000000000000000000000000000001eb00000000000000000000000000000000000000000000000000000000000001ec00000000000000000000000000000000000000000000000000000000000001ed00000000000000000000000000000000000000000000000000000000000001ee00000000000000000000000000000000000000000000000000000000000001ef00000000000000000000000000000000000000000000000000000000000001f000000000000000000000000000000000000000000000000000000000000001f100000000000000000000000000000000000000000000000000000000000001f200000000000000000000000000000000000000000000000000000000000001f300000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f500000000000000000000000000000000000000000000000000000000000001f600000000000000000000000000000000000000000000000000000000000001f700000000000000000000000000000000000000000000000000000000000001f800000000000000000000000000000000000000000000000000000000000001f900000000000000000000000000000000000000000000000000000000000001fa00000000000000000000000000000000000000000000000000000000000001fb00000000000000000000000000000000000000000000000000000000000001fc00000000000000000000000000000000000000000000000000000000000001fd00000000000000000000000000000000000000000000000000000000000001fe00000000000000000000000000000000000000000000000000000000000001ff3f00000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000
000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa00000000000000000000000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe0800a68d2df6e48c8b31f4c76529e586de2cd9d0f2f2fdfbc4c523db9e3a29e9b80016c236e57bf17afe324437acd1060772e3f31d4f9e734ad758d0627c4ba2a200d659011ddde95e32886bdd8c9464da1ca144ccadd539f0992b4abb491d812a00c8025bb9006a976ebd6ad940155f15f89ca0bb7312b53841fc257e7ed689c8004165f2c46b70fb183468b38f31bba7aa9d22ce8dfb61fe721470729ce1c6b100afeb60dd983514ebbaee141b2196f3eb3c9c299f576a0097872cc85a79a43f005f6bfee53d20a474901493558419dbceb3aca40e5e18915d38031498f4d2bb008791217ee34
1ec5dc11f7d7a31160fb1b1f2cf767062970e9526e5751956253f00000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005c800000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005c900000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005e40000000000000000000000000000000000000000000000
0000000000000005db00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005f600000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005f700000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005f800000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005f900000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005fa00000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005fb00000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005fc00000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005fd00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005fe00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ff00000000000000000000000000000000000000000000000000000000000005f6000000000000000000000000000000000000000000000000000000000000060000000000000000000
000000000000000000000000000000000000000000005f7000000000000000000000000000000000000000000000000000000000000060100000000000000000000000000000000000000000000000000000000000005f8000000000000000000000000000000000000000000000000000000000000060200000000000000000000000000000000000000000000000000000000000005f9000000000000000000000000000000000000000000000000000000000000060300000000000000000000000000000000000000000000000000000000000005fa000000000000000000000000000000000000000000000000000000000000060400000000000000000000000000000000000000000000000000000000000005fb000000000000000000000000000000000000000000000000000000000000060500000000000000000000000000000000000000000000000000000000000005fc000000000000000000000000000000000000000000000000000000000000060600000000000000000000000000000000000000000000000000000000000005fd000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000005fe0000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020100000000000000000000000000000000000000000000000000000000000002020000000000000000000000000000000000000000000000000000000000000203000000000000000000000000000000000000000000000000000000000000020400000000000000000000000000000000000000000000000000000000000002050000000000000000000000000000000000000000000000000000000000000206000000000000000000000000000000000000000000000000000000000000020700000000000000000000000000000000000000000000000000000000000002080000000000000000000000000000000000000000000000000000000000000209000000000000000000000000000000000000000000000000000000000000020a000000000000000000000000000000000000000000000000000000000000020b000000000000000000000000000000000000000000000000000000000000020c000000000000000000000000000000000000000000000000000000000000020d000000000000000000000000000000000000000000000000000000000000020e000000000000000000000000000000000000000000000000000000000000020f0000000000000000000000000000000000000000000000000000000000000210000000000000000000000000000000000000000000000000000000000000021100000000000000000000000000000000000000000000000000000000000002120000000000000000000000000000000000000000000000000000000000000213000000000000000000000000000000000000000000000000000000000000021400000000000000000000000000000000000000000000000000000000000002150000000000000000000000000000000000000000000000000000000000000216000000000000000000000000000000000000000000000000000000000000021700000000000000000000000000000000000000000000000000000000000002180000000000000000000000000000000000000000000000000000000000000219000000000000000000000000000000000000000000000000000000000000021a000000000000000000000000000000000000000000000000000000000000021b000000000000000000000000000000000000000000000000000000000000021c000000000000000000000000000000000000000000000000000000000000021d000000000000000000000000000000000000000000000000000000000000021e000000000000000000000000000000000000000000000000000000000000021f000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000002210000000000000000000000000000000000000000000000000000000000000222000000000000000000000000
0000000000000000000000000000000000000223000000000000000000000000000000000000000000000000000000000000022400000000000000000000000000000000000000000000000000000000000002250000000000000000000000000000000000000000000000000000000000000226000000000000000000000000000000000000000000000000000000000000022700000000000000000000000000000000000000000000000000000000000002280000000000000000000000000000000000000000000000000000000000000229000000000000000000000000000000000000000000000000000000000000022a000000000000000000000000000000000000000000000000000000000000022b000000000000000000000000000000000000000000000000000000000000022c000000000000000000000000000000000000000000000000000000000000022d000000000000000000000000000000000000000000000000000000000000022e000000000000000000000000000000000000000000000000000000000000022f0000000000000000000000000000000000000000000000000000000000000230000000000000000000000000000000000000000000000000000000000000023100000000000000000000000000000000000000000000000000000000000002320000000000000000000000000000000000000000000000000000000000000233000000000000000000000000000000000000000000000000000000000000023400000000000000000000000000000000000000000000000000000000000002350000000000000000000000000000000000000000000000000000000000000236000000000000000000000000000000000000000000000000000000000000023700000000000000000000000000000000000000000000000000000000000002380000000000000000000000000000000000000000000000000000000000000239000000000000000000000000000000000000000000000000000000000000023a000000000000000000000000000000000000000000000000000000000000023b000000000000000000000000000000000000000000000000000000000000023c000000000000000000000000000000000000000000000000000000000000023d000000000000000000000000000000000000000000000000000000000000023e000000000000000000000000000000000000000000000000000000000000023f3f0000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000
000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f0000000000000000000000000000000000000000000000000000000000000320000000000000000000000000000000000000000000000000000000000000032100000000000000000000000000000000000000000000000000000000000003220000000000000000000000000000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e0800c064a9343bfaf1345a5ad188e96aa4c5bad0b4a5590da51a9ff8f3c98ade8d008803d57dd0923c95a01b2bfbee4df6d66dc7baee1d2cd2770575feb86d040200ea64eed9489feb7bdf29bf817a6e8772e549da7b291028852d0dd3810cee9500947e8f904d41be8a4e08b146b4e1f0cd88f55722ef987d1d485c4196ab9f71002e5d9ed5d71bc3bea6edf988c4b9ba7fceb0815180d893852ed343c64ab55c0040f7f519ec89d360d83e242b6c0ce049d2b3356b8cfbf1ff3b733ef0f8a089006f5cfe5fc4a7b87c634f9e253a7cea2052229250d0c0e913eb50b5ef3612de00b759fce0eed36bb653b67255cce111b3529c383bd7d2b758f8cb53a4451f273f0000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000601000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000602000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000603000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000604000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000605000000000000000000000000
000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006060000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000006110000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000061200000000000000000000000000000000000000000000000000000000000006090000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000616000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000617000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000611000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000612000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006160000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000061700000000000000000000000000000000000000000000000000000000000006210000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000062200000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000626000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000627000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000062a00000000000000000000000000000000000000000000000000000000000
00621000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000622000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006260000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000062700000000000000000000000000000000000000000000000000000000000006310000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000063200000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000636000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000637000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000631000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000632000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000063f00000000000000000000000000000000000000000000000000000000000006360000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000063700000000000000000000000000000000000000000000000000000000000006410000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000064200000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000646000000000000000000000000000000
000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000647000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "txsEffectsHash": "0x00e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b", "decodedHeader": { @@ -72,10 +72,10 @@ "blockNumber": 1, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000043", "chainId": 31337, - "timestamp": 1724857731, + "timestamp": 1724860686, "version": 1, - "coinbase": "0x210c97d1d20ae59929bf8ac222b5508cd9c37d2a", - "feeRecipient": "0x2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43", + "coinbase": "0x9ca40d2eb00ca73819f826b0a788dd9891e21d13", + "feeRecipient": "0x07803e414075315a9195dd8f9ef9154cc40db0c0a4cb08aa98e253635741c0f2", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -106,8 +106,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000066cf3d83210c97d1d20ae59929bf8ac222b5508cd9c37d2a2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x003558e5dac17899221e594eb6d2f2089ce984a674aa63d3815afbeb964588a7", + "header": 
"0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000066cf490e9ca40d2eb00ca73819f826b0a788dd9891e21d1307803e414075315a9195dd8f9ef9154cc40db0c0a4cb08aa98e253635741c0f2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x004c32b99c2c2cfac2f922454f3b7dd86ee2c8c34c98369b5b6f78a4703ae74f", "numTxs": 4 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_2.json b/l1-contracts/test/fixtures/mixed_block_2.json index 80906bf0265..8b58730d955 100644 --- a/l1-contracts/test/fixtures/mixed_block_2.json +++ b/l1-contracts/test/fixtures/mixed_block_2.json @@ -58,7 +58,7 @@ ] }, "block": { - "archive": "0x1926af497e574a918cf90e15520a8fdf4ff569f2d39fc4e72c7d23da4abbdb0a", + "archive": "0x1ae32d8b8b3e677d02908a8922a6d03d2ee72021039bf6a6e0fbea570b8f7b47", "body": 
"0x00000004000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c000000000000000000000000000000000000000000000000000000000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f00000000000000000000000000000000000000000000000000000000000002700000000000000000000000000000000000000000000000000000000000000271000000000000000000000000000000000000000000000000000000000000027200000000000000000000000000000000000000000000000000000000000002730000000000000000000000000000000000000000000000000000000000000274000000000000000000000000000000000000000000000000000000000000027500000000000000000000
00000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e000000000000000000000000000000000000000000000000000000000000027f3f0000000000000000000000000000000000000000000000000000000000000340000000000000000000000000000000000000000000000000000000000000034100000000000000000000000000000000000000000000000000000000000003420000000000000000000000000000000000000000000000000000000000000343000000000000000000000000000000000000000000000000000000000000034400000000000000000000000000000000000000000000000000000000000003450000000000000000000000000000000000000000000000000000000000000346000000000000000000000000000000000000000000000000000000000000034700000000000000000000000000000000000000000000000000000000000003480000000000000000000000000000000000000000000000000000000000000349000000000000000000000000000000000000000000000000000000000000034a000000000000000000000000000000000000000000000000000000000000034b000000000000000000000000000000000000000000000000000000000000034c000000000000000000000000000000000000000000000000000000000000034d000000000000000000000000000000000000000000000000000000000000034e000000000000000000000000000000000000000000000000000000000000034f0000000000000000000000000000000000000000000000000000000000000350000000000000000000000000000000000000000000000000000000000000035100000000000000000000000000000000000000000000000000000000000003520000000000000000000000000000000000000000000000000000000000000353000000000000000000000000000000000000000000000000000000000000035400000000000000000000000000000000000000000000000000000000000003550000000000000000000000000000000000000000000000000000000000000356000000000000000000000000000000000000000000000000000000000000035700000000000000000000000000000000000000000000000000000000000003580000000000000000000000000000000000000000000000000000000000000359000000000000000000000000000000000000000000000000000000000000035a000000000000000000000000000000000000000000000000000000000000035b000000000000000000000000000000000000000000000000000000000000035c000000000000000000000000000000000000000000000000000000000000035d000000000000000000000000000000000000000000000000000000000000035e000000000000000000000000000000000000000000000000000000000000035f0000000000000000000000000000000000000000000000000000000000000360000000000000000000000000000000000000000000000000000000000000036100000000000000000000000000000000000000000000000000000000000003620000000000000000000000000000000000000000000000000000000000000363000000000000000000000000000000000000000000000000000000000000036400000000000000000000000000000000000000000000000000000000000003650000000000000000000000000000000000000000000000000000000000000366000000000000000000000000000000000000000000000000000000000000036700000000000000000000000000000000000000000000000000000000000003680000000000000000000000000000000000000000000000000000000000000369000000000000000000000000000000000000000000000000000000000000036a000000000000000000000000000000000000000000000000000000000000036b000000000000000000000000000000000000000000000000000000000000036c00000000000000000000000000000000000000000000000000000
0000000036d000000000000000000000000000000000000000000000000000000000000036e000000000000000000000000000000000000000000000000000000000000036f0000000000000000000000000000000000000000000000000000000000000370000000000000000000000000000000000000000000000000000000000000037100000000000000000000000000000000000000000000000000000000000003720000000000000000000000000000000000000000000000000000000000000373000000000000000000000000000000000000000000000000000000000000037400000000000000000000000000000000000000000000000000000000000003750000000000000000000000000000000000000000000000000000000000000376000000000000000000000000000000000000000000000000000000000000037700000000000000000000000000000000000000000000000000000000000003780000000000000000000000000000000000000000000000000000000000000379000000000000000000000000000000000000000000000000000000000000037a000000000000000000000000000000000000000000000000000000000000037b000000000000000000000000000000000000000000000000000000000000037c000000000000000000000000000000000000000000000000000000000000037d000000000000000000000000000000000000000000000000000000000000037e08003bbb1177505f3433bb062787fcac6585d30fa1a7e0b809ca5eef1cecc56cd0002d5b86280022d106b72e4425ea49f927ca2b8138fc13b3d9feeaf36ae61fb100adab9d66b73ae23a6e9127ebe0bcd963ef4312dd66878a6be131d39a7ee01c00d48f30dbb82c96c734c8abe33f4f9f75e6d0ba4462ee07d73c5335e04a02e900f31a3a4e7333ac6043a929fca12f5347e9e8bf1d67f5993860a774f10b77bc00230e3132bfa5df23e2e018b20cd4e0c75555825ee7924da73f017a279d39cd0095e2affd6b67c6ecbf77fa1e638139498e1642abb468c58ca8ce275260ea6100362e0941035fd171fab926caf55d18b22f7de99d60ac6f454386a9cce4a1ff3f0000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000641000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000642000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000064e0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006460000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000064700000000000000000000000000000000000000000000000000000000000006510000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000065200000000000000000000000000000000000000000000000000000000000006490000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000656000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000657000000000000000000000000000000000000000000000000000000000000064e000000000000000000000000000000000000000000000000000000000000065800000000000000000000
0000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000651000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000652000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006560000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000065700000000000000000000000000000000000000000000000000000000000006610000000000000000000000000000000000000000000000000000000000000658000000000000000000000000000000000000000000000000000000000000066200000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000666000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000667000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000000000661000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000662000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006660000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000066700000000000000000000000000000000000000000000000000000000000006710000000000000000000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000067200000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000
000000674000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000676000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000677000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000671000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000672000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000674000000000000000000000000000000000000000000000000000000000000067e0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000067f00000000000000000000000000000000000000000000000000000000000006760000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000067700000000000000000000000000000000000000000000000000000000000006810000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000068200000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000686000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000687000000000000000000000000000000000000000000000000000000000000067e000000000000000000000000000000000000000000000000000000000000068800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002810000000000000000000000000000000000000000000000000000000000000282000000000000000000000000000000000000000000000000000000000000028300000000000000000000000000000000000000000000000000000000000002840000000000000000000000000000000000000000000000000000000000000285000000000000000000000000000000000000000000000000000000000000028600000000000000000000000000000000000000000000000000000000000002870000000000000000000000000000000000000000000000000000000000000288000000000000000000000000000000000000000000000000000000000000028900000000000000000000000000000000000000000000000000000000000002
8a000000000000000000000000000000000000000000000000000000000000028b000000000000000000000000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be00000000000000000000000000000000000000000000000000000000000002bf3f000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000003810000000000000000000000000000000
0000000000000000000000000000003820000000000000000000000000000000000000000000000000000000000000383000000000000000000000000000000000000000000000000000000000000038400000000000000000000000000000000000000000000000000000000000003850000000000000000000000000000000000000000000000000000000000000386000000000000000000000000000000000000000000000000000000000000038700000000000000000000000000000000000000000000000000000000000003880000000000000000000000000000000000000000000000000000000000000389000000000000000000000000000000000000000000000000000000000000038a000000000000000000000000000000000000000000000000000000000000038b000000000000000000000000000000000000000000000000000000000000038c000000000000000000000000000000000000000000000000000000000000038d000000000000000000000000000000000000000000000000000000000000038e000000000000000000000000000000000000000000000000000000000000038f0000000000000000000000000000000000000000000000000000000000000390000000000000000000000000000000000000000000000000000000000000039100000000000000000000000000000000000000000000000000000000000003920000000000000000000000000000000000000000000000000000000000000393000000000000000000000000000000000000000000000000000000000000039400000000000000000000000000000000000000000000000000000000000003950000000000000000000000000000000000000000000000000000000000000396000000000000000000000000000000000000000000000000000000000000039700000000000000000000000000000000000000000000000000000000000003980000000000000000000000000000000000000000000000000000000000000399000000000000000000000000000000000000000000000000000000000000039a000000000000000000000000000000000000000000000000000000000000039b000000000000000000000000000000000000000000000000000000000000039c000000000000000000000000000000000000000000000000000000000000039d000000000000000000000000000000000000000000000000000000000000039e000000000000000000000000000000000000000000000000000000000000039f00000000000000000000000000000000000000000000000000000000000003a000000000000000000000000000000000000000000000000000000000000003a100000000000000000000000000000000000000000000000000000000000003a200000000000000000000000000000000000000000000000000000000000003a300000000000000000000000000000000000000000000000000000000000003a400000000000000000000000000000000000000000000000000000000000003a500000000000000000000000000000000000000000000000000000000000003a600000000000000000000000000000000000000000000000000000000000003a700000000000000000000000000000000000000000000000000000000000003a800000000000000000000000000000000000000000000000000000000000003a900000000000000000000000000000000000000000000000000000000000003aa00000000000000000000000000000000000000000000000000000000000003ab00000000000000000000000000000000000000000000000000000000000003ac00000000000000000000000000000000000000000000000000000000000003ad00000000000000000000000000000000000000000000000000000000000003ae00000000000000000000000000000000000000000000000000000000000003af00000000000000000000000000000000000000000000000000000000000003b000000000000000000000000000000000000000000000000000000000000003b100000000000000000000000000000000000000000000000000000000000003b200000000000000000000000000000000000000000000000000000000000003b300000000000000000000000000000000000000000000000000000000000003b400000000000000000000000000000000000000000000000000000000000003b500000000000000000000000000000000000000000000000000000000000003b600000000000000000000000000000000000000000000000000000000000003b700000000000000000000000000000000000000000000000000000000000003b800000000000000000000000000000000000000000000000000000000000003b900
000000000000000000000000000000000000000000000000000000000003ba00000000000000000000000000000000000000000000000000000000000003bb00000000000000000000000000000000000000000000000000000000000003bc00000000000000000000000000000000000000000000000000000000000003bd00000000000000000000000000000000000000000000000000000000000003be08004a32502b5d2a0cf5d776c92ef74de61a28e7882bdadeb3b0530f472704604300808c2412e2725ecaa6a6bd77e3159349e238dc7817f906ba32afd40b3cb3cb00d7d4e3b313c4cce9bd98c317ea715847a92d795b82a6f8b8144b7c61bee64200b76f07678c289f41378029b1353a73e1afbe241612a97c23e7fc059099f49d00c72046b39a9dc902cee36db5214c72315636bd824abfabdf80b1c5e6a01fd7006e0a74fec166a12f5a62954776784f01cba3be7ab876eef2e49a2ab7a5b0f900c83ddd8f6b91086bc83485adbe8056212b4d33b91840cd3dc649f9736881460022441e76225010acce7f429dc754fb3260ae1d387937978f69c67a879ed0733f0000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000068a0000000000000000000000000000000000000000000000000000000000000681000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000682000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006860000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000000000000000000000000000000000068700000000000000000000000000000000000000000000000000000000000006910000000000000000000000000000000000000000000000000000000000000688000000000000000000000000000000000000000000000000000000000000069200000000000000000000000000000000000000000000000000000000000006890000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000068a0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000696000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000697000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000698000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006990000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000000000000000000000000000000000069a0000000000000000000000000000000000000000000000000000000000000691000000000000000000000000000000000000000000000000000000000000069b0000000000000000000000000000000000000000000000000000000000000692000000000000000000000000000000000000000000000000000000000000069c0000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000069d0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000069e000000000000000000000000000000000
0000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000069f000000000000000000000000000000000000000000000000000000000000069600000000000000000000000000000000000000000000000000000000000006a0000000000000000000000000000000000000000000000000000000000000069700000000000000000000000000000000000000000000000000000000000006a1000000000000000000000000000000000000000000000000000000000000069800000000000000000000000000000000000000000000000000000000000006a2000000000000000000000000000000000000000000000000000000000000069900000000000000000000000000000000000000000000000000000000000006a3000000000000000000000000000000000000000000000000000000000000069a00000000000000000000000000000000000000000000000000000000000006a4000000000000000000000000000000000000000000000000000000000000069b00000000000000000000000000000000000000000000000000000000000006a5000000000000000000000000000000000000000000000000000000000000069c00000000000000000000000000000000000000000000000000000000000006a6000000000000000000000000000000000000000000000000000000000000069d00000000000000000000000000000000000000000000000000000000000006a7000000000000000000000000000000000000000000000000000000000000069e00000000000000000000000000000000000000000000000000000000000006a8000000000000000000000000000000000000000000000000000000000000069f00000000000000000000000000000000000000000000000000000000000006a900000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006a100000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006a200000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006a300000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006a400000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006a500000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006a600000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006a700000000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006a800000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006a900000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006b900000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006ba0000
0000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006bf00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006b900000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006ba00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006c80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000
000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa00000000000000000000000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe00000000000000000000000000000000000000000000000000000000000002ff3f00000000000000000000000000000000000000000000000000000000000003c000000000000000000000000000000000000000000000000000000000000003c100000000000000000000000000000000000000000000000000000000000003c200000000000000000000000000000000000000000000000000000000000003c300000000000000000000000000000000000000000000000000000000000003c400000000000000000000000000000000000000000000000000000000000003c500000000000000000000000000000000000000000000000000000000000003c600000000000000000000000000000000000000000000000000000000000003c700000000000000000000000000000000000000000000000000000000000003c800000000000000000000000000000000000000000000000000000000000003c900000000000000000000000000000000000000000000000000000000000003ca00000000000000000000000000000000000000000000000000000000000003cb00000000000000000000000000000000000000000000000000000000000003cc00000000000000000000000000000000000000000000000000000000000003cd00000000000000000000000000000000000000000000
000000000000000003ce00000000000000000000000000000000000000000000000000000000000003cf00000000000000000000000000000000000000000000000000000000000003d000000000000000000000000000000000000000000000000000000000000003d100000000000000000000000000000000000000000000000000000000000003d200000000000000000000000000000000000000000000000000000000000003d300000000000000000000000000000000000000000000000000000000000003d400000000000000000000000000000000000000000000000000000000000003d500000000000000000000000000000000000000000000000000000000000003d600000000000000000000000000000000000000000000000000000000000003d700000000000000000000000000000000000000000000000000000000000003d800000000000000000000000000000000000000000000000000000000000003d900000000000000000000000000000000000000000000000000000000000003da00000000000000000000000000000000000000000000000000000000000003db00000000000000000000000000000000000000000000000000000000000003dc00000000000000000000000000000000000000000000000000000000000003dd00000000000000000000000000000000000000000000000000000000000003de00000000000000000000000000000000000000000000000000000000000003df00000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000003e100000000000000000000000000000000000000000000000000000000000003e200000000000000000000000000000000000000000000000000000000000003e300000000000000000000000000000000000000000000000000000000000003e400000000000000000000000000000000000000000000000000000000000003e500000000000000000000000000000000000000000000000000000000000003e600000000000000000000000000000000000000000000000000000000000003e700000000000000000000000000000000000000000000000000000000000003e800000000000000000000000000000000000000000000000000000000000003e900000000000000000000000000000000000000000000000000000000000003ea00000000000000000000000000000000000000000000000000000000000003eb00000000000000000000000000000000000000000000000000000000000003ec00000000000000000000000000000000000000000000000000000000000003ed00000000000000000000000000000000000000000000000000000000000003ee00000000000000000000000000000000000000000000000000000000000003ef00000000000000000000000000000000000000000000000000000000000003f000000000000000000000000000000000000000000000000000000000000003f100000000000000000000000000000000000000000000000000000000000003f200000000000000000000000000000000000000000000000000000000000003f300000000000000000000000000000000000000000000000000000000000003f400000000000000000000000000000000000000000000000000000000000003f500000000000000000000000000000000000000000000000000000000000003f600000000000000000000000000000000000000000000000000000000000003f700000000000000000000000000000000000000000000000000000000000003f800000000000000000000000000000000000000000000000000000000000003f900000000000000000000000000000000000000000000000000000000000003fa00000000000000000000000000000000000000000000000000000000000003fb00000000000000000000000000000000000000000000000000000000000003fc00000000000000000000000000000000000000000000000000000000000003fd00000000000000000000000000000000000000000000000000000000000003fe0800bb07f923e24c36227d717557d58d32b99370b81f7a70d1835becc1114f7d7700624291eff6ab801e5668bc6619a3df132f8b392b063f09dfe8a69e38224eb200bd6b195f8716ab3dc93c44037cac579d25d0e77c2782b5aa62534ee25d36bc008c99d470d2c53624a8c5bedfeffdcc5356f6da89ee0e372b3ea3fa4f8cd652009944e00a3f9a7d2e8a535d3a210c3271d5732518037704d67ef5d42a8b82c200523014cb6eabe4366c6e599ba96534fc15ecc804a13fbaaacf8717e1b0a8d20005621252c4b36c113f21ad6c13b99e5cef514bdd98ef0aae4075b5cb5a000a00d80cb2a60aa
e6c3d2e7d27fb1519a708a18bb5b5085f78684bdee7512856a93f00000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006c800000000000000000000000000000000000000000000000000000000000006d200000000000000000000000000000000000000000000000000000000000006c900000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006d800000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006db00000000000000000000000000000000000000000000000000000000000006d200000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006d800000000000000000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006e40000000000000000000000000000000000000000000000
0000000000000006db00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006e400000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006f100000000000000000000000000000000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006f600000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006f700000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006f800000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006f900000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006fa00000000000000000000000000000000000000000000000000000000000006f100000000000000000000000000000000000000000000000000000000000006fb00000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006fc00000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006fd00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006fe00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ff00000000000000000000000000000000000000000000000000000000000006f6000000000000000000000000000000000000000000000000000000000000070000000000000000000
000000000000000000000000000000000000000000006f7000000000000000000000000000000000000000000000000000000000000070100000000000000000000000000000000000000000000000000000000000006f8000000000000000000000000000000000000000000000000000000000000070200000000000000000000000000000000000000000000000000000000000006f9000000000000000000000000000000000000000000000000000000000000070300000000000000000000000000000000000000000000000000000000000006fa000000000000000000000000000000000000000000000000000000000000070400000000000000000000000000000000000000000000000000000000000006fb000000000000000000000000000000000000000000000000000000000000070500000000000000000000000000000000000000000000000000000000000006fc000000000000000000000000000000000000000000000000000000000000070600000000000000000000000000000000000000000000000000000000000006fd000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000006fe0000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003210000000000000000000000000000000000000000000000000000000000000322000000000000000000000000
0000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e000000000000000000000000000000000000000000000000000000000000033f3f0000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000040100000000000000000000000000000000000000000000000000000000000004020000000000000000000000000000000000000000000000000000000000000403000000000000000000000000000000000000000000000000000000000000040400000000000000000000000000000000000000000000000000000000000004050000000000000000000000000000000000000000000000000000000000000406000000000000000000000000000000000000000000000000000000000000040700000000000000000000000000000000000000000000000000000000000004080000000000000000000000000000000000000000000000000000000000000409000000000000000000000000000000000000000000000000000000000000040a000000000000000000000000000000000000000000000000000000000000040b000000000000000000000000000000000000000000000000000000000000040c000000000000000000000000000000000000000000000000000000000000040d000000000000000000000000000000000000000000000000000000000000040e000000000000000000000000000000000000000000000000000000000000040f0000000000000000000000000000000000000000000000000000000000000410000000000000000000000000000000000000000000000000000000000000041100000000000000000000000000000000000000000000000000000000000004120000000000000000000000000000000000000000000000000000000000000413000000000000000000000000000000000000000000000000000000000000041400000000000000000000000000000000000000000000000000000000000004150000000000000000000000000000000000000000000000000000000000000416000000000000000000000000000000000000000000000000000000000000041700000000000000000000000000000000000000000000000000000000000004180000000000000000000000000000000000000000000000000000000000000419000000000000000000000000000000000000000000000000000000000
000041a000000000000000000000000000000000000000000000000000000000000041b000000000000000000000000000000000000000000000000000000000000041c000000000000000000000000000000000000000000000000000000000000041d000000000000000000000000000000000000000000000000000000000000041e000000000000000000000000000000000000000000000000000000000000041f0000000000000000000000000000000000000000000000000000000000000420000000000000000000000000000000000000000000000000000000000000042100000000000000000000000000000000000000000000000000000000000004220000000000000000000000000000000000000000000000000000000000000423000000000000000000000000000000000000000000000000000000000000042400000000000000000000000000000000000000000000000000000000000004250000000000000000000000000000000000000000000000000000000000000426000000000000000000000000000000000000000000000000000000000000042700000000000000000000000000000000000000000000000000000000000004280000000000000000000000000000000000000000000000000000000000000429000000000000000000000000000000000000000000000000000000000000042a000000000000000000000000000000000000000000000000000000000000042b000000000000000000000000000000000000000000000000000000000000042c000000000000000000000000000000000000000000000000000000000000042d000000000000000000000000000000000000000000000000000000000000042e000000000000000000000000000000000000000000000000000000000000042f0000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000000000043100000000000000000000000000000000000000000000000000000000000004320000000000000000000000000000000000000000000000000000000000000433000000000000000000000000000000000000000000000000000000000000043400000000000000000000000000000000000000000000000000000000000004350000000000000000000000000000000000000000000000000000000000000436000000000000000000000000000000000000000000000000000000000000043700000000000000000000000000000000000000000000000000000000000004380000000000000000000000000000000000000000000000000000000000000439000000000000000000000000000000000000000000000000000000000000043a000000000000000000000000000000000000000000000000000000000000043b000000000000000000000000000000000000000000000000000000000000043c000000000000000000000000000000000000000000000000000000000000043d000000000000000000000000000000000000000000000000000000000000043e0800a38df4d53fe5da3d48c97dd0f58e7a52faade40c8f50bc0f6408a2b5b3829800a3d4b994758c6630518cce3116391874b9b3b8a1bbcb36485d2702f05af359004c06e9dd14418667d7c66384699665f9222ed08be87c67293a2f1b6b4c41aa006b77b11703f0e3d21aa68c9f6d2ae8ad94ecafdb56be0c4605a0c6723e9ddc0049a087e215b640d8360f8dd98e07c77e4d2025a0dcdcaf78bc00bd6fde09680014741a52013a10622de90b1a7854f1203af5c97121a4b3774c249fba6e0dba00c783bd01c4a4cafe83fd3b4d1ce0c555be4b6d1f772cccccf86049f9af0bbe002ff3ac4d7e2f1caf1fb30e1d20eba5be2b6a42dd54690dee988a934026491e3f0000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000070a0000000000000000000000000000000000000000000000000000000000000701000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000702000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000703000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000704000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000705000000000000000000000000
000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007060000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000007110000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000071200000000000000000000000000000000000000000000000000000000000007090000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000070a0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000716000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000717000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000000000000000000000071a0000000000000000000000000000000000000000000000000000000000000711000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000712000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007160000000000000000000000000000000000000000000000000000000000000720000000000000000000000000000000000000000000000000000000000000071700000000000000000000000000000000000000000000000000000000000007210000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000072200000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000071a0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000726000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000727000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000000720000000000000000000000000000000000000000000000000000000000000072a00000000000000000000000000000000000000000000000000000000000
00721000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000722000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007260000000000000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000072700000000000000000000000000000000000000000000000000000000000007310000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000073200000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000072a0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000736000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000737000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007390000000000000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000731000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000732000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000073f00000000000000000000000000000000000000000000000000000000000007360000000000000000000000000000000000000000000000000000000000000740000000000000000000000000000000000000000000000000000000000000073700000000000000000000000000000000000000000000000000000000000007410000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000074200000000000000000000000000000000000000000000000000000000000007390000000000000000000000000000000000000000000000000000000000000743000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000744000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000745000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000746000000000000000000000000000000
000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000747000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000748000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "txsEffectsHash": "0x008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd646", "decodedHeader": { @@ -72,10 +72,10 @@ "blockNumber": 2, "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000055", "chainId": 31337, - "timestamp": 1724857947, + "timestamp": 1724860902, "version": 1, - "coinbase": "0x210c97d1d20ae59929bf8ac222b5508cd9c37d2a", - "feeRecipient": "0x2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43", + "coinbase": "0x9ca40d2eb00ca73819f826b0a788dd9891e21d13", + "feeRecipient": "0x07803e414075315a9195dd8f9ef9154cc40db0c0a4cb08aa98e253635741c0f2", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -83,7 +83,7 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a" + "root": "0x1c4782ca647f1ed559bce9f625bfae6a561ce96481c49d9b31ce4df8f46124b1" }, "stateReference": { "l1ToL2MessageTree": { @@ -106,8 +106,8 @@ } } }, - "header": "0x16ff2b46f1343feaee80c00c6b6c8e4a403a5027f1a8127caa5e7c7d67b0e96a000000020000000000000000000000000000000000000000000000000000000000000004008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd64600212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb89193700d4b436f0c9857646ed7cf0836bd61c857183956d1acefe52fc99ef7b333a38224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad0000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000066cf3e5b210c97d1d20ae59929bf8ac222b5508cd9c37d2a2f25da389d6b307aa2ba1bab55c9a7eb540ba04f28dcdeefa749277eb8c17d43000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x004a7a3146709849c48af3458b15956b28962ea0938fd6dd655fbb81f6e27222", + "header": 
"0x1c4782ca647f1ed559bce9f625bfae6a561ce96481c49d9b31ce4df8f46124b1000000020000000000000000000000000000000000000000000000000000000000000004008e46703a73fee39cb8a1bd50a03e090eb250de227639bbf1448462452bd64600212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb89193700d4b436f0c9857646ed7cf0836bd61c857183956d1acefe52fc99ef7b333a38224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad0000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a690000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000066cf49e69ca40d2eb00ca73819f826b0a788dd9891e21d1307803e414075315a9195dd8f9ef9154cc40db0c0a4cb08aa98e253635741c0f2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x00450d818debc92707ef43bd43269cfe23681e8eb7485ce97cac5fe88c08bb91", "numTxs": 4 } } \ No newline at end of file diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index e1bdd04b338..98d1e156cd4 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -15,12 +15,12 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "TIME_TRAVELER=${TIME_TRAVELER:-true} LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", - "test:profile": "TIME_TRAVELER=${TIME_TRAVELER:-true} LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test:profile": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", "serve:flames": "python3 -m http.server --directory \"flame_graph\" 8000", - "test:debug": "TIME_TRAVELER=${TIME_TRAVELER:-true} LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", - "test:integration": "TIME_TRAVELER=${TIME_TRAVELER:-true} concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", - "test:integration:run": "TIME_TRAVELER=${TIME_TRAVELER:-true} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json", + "test:debug": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + 
"test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", + "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" }, "dependencies": { diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index c76111c3c26..62f136fa45d 100644 --- a/yarn-project/end-to-end/package.local.json +++ b/yarn-project/end-to-end/package.local.json @@ -2,7 +2,7 @@ "scripts": { "build": "yarn clean && tsc -b && webpack", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", - "test": "TIME_TRAVELER=${TIME_TRAVELER:-true} LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" } } \ No newline at end of file diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 563f6cdd84d..661a4e36dc6 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -170,7 +170,6 @@ describe('L1Publisher integration', () => { publisherPrivateKey: sequencerPK, l1PublishRetryIntervalMS: 100, l1ChainId: 31337, - timeTraveler: true, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/index.ts b/yarn-project/end-to-end/src/fixtures/index.ts index 146faa68d45..270da9ae54c 100644 --- a/yarn-project/end-to-end/src/fixtures/index.ts +++ b/yarn-project/end-to-end/src/fixtures/index.ts @@ -1,3 +1,4 @@ export * from './fixtures.js'; export * from './logging.js'; export * from './utils.js'; +export * from './watcher.js'; diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index a00fe468316..6b821026179 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -41,6 +41,7 @@ import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { setupL1Contracts } from './setup_l1_contracts.js'; import { deployCanonicalAuthRegistry, deployCanonicalKeyRegistry, getPrivateKeyFromIndex } from './utils.js'; +import { Watcher } from './watcher.js'; export type SubsystemsContext = { anvil: Anvil; @@ -51,6 +52,7 @@ export type SubsystemsContext = { pxe: PXEService; deployL1ContractsValues: DeployL1Contracts; proverNode: ProverNode; + watcher: Watcher; }; type SnapshotEntry = { @@ -229,6 +231,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.pxe.stop(); await context.acvmConfig?.cleanup(); await context.anvil.stop(); + await context.watcher.stop(); } export async function createAndSyncProverNode( @@ -338,6 +341,13 @@ async function setupFromFresh( aztecNode, ); + const watcher = new Watcher( + new EthCheatCodes(aztecNodeConfig.l1RpcUrl), + 
deployL1ContractsValues.l1ContractAddresses.rollupAddress, + deployL1ContractsValues.publicClient, + ); + watcher.start(); + logger.verbose('Creating pxe...'); const pxeConfig = getPXEServiceConfig(); pxeConfig.dataDirectory = statePath; @@ -365,6 +375,7 @@ async function setupFromFresh( bbConfig, deployL1ContractsValues, proverNode, + watcher, }; } @@ -408,6 +419,13 @@ async function setupFromState(statePath: string, logger: Logger): Promise 0 ? await createAccounts(pxe, numberOfAccounts) : []; const cheatCodes = CheatCodes.create(config.l1RpcUrl, pxe!); @@ -469,6 +477,7 @@ export async function setup( } await anvil?.stop(); + await watcher.stop(); }; return { diff --git a/yarn-project/end-to-end/src/fixtures/watcher.ts b/yarn-project/end-to-end/src/fixtures/watcher.ts new file mode 100644 index 00000000000..0d254fbf5ff --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/watcher.ts @@ -0,0 +1,64 @@ +import { type DebugLogger, type EthCheatCodes, createDebugLogger } from '@aztec/aztec.js'; +import { type EthAddress } from '@aztec/circuits.js'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import { RollupAbi } from '@aztec/l1-artifacts'; + +import { type GetContractReturnType, type HttpTransport, type PublicClient, getAddress, getContract } from 'viem'; +import type * as chains from 'viem/chains'; + +export class Watcher { + private rollup: GetContractReturnType>; + + private filledRunningPromise?: RunningPromise; + + private logger: DebugLogger = createDebugLogger(`aztec:utils:watcher`); + + constructor( + private cheatcodes: EthCheatCodes, + rollupAddress: EthAddress, + publicClient: PublicClient, + ) { + this.rollup = getContract({ + address: getAddress(rollupAddress.toString()), + abi: RollupAbi, + client: publicClient, + }); + + this.logger.info(`Watcher created for rollup at ${rollupAddress}`); + } + + start() { + if (this.filledRunningPromise) { + throw new Error('Watcher already watching for filled slot'); + } + this.filledRunningPromise = new RunningPromise(() => this.mineIfSlotFilled(), 1000); + this.filledRunningPromise.start(); + this.logger.info(`Watcher started`); + } + + async stop() { + await this.filledRunningPromise?.stop(); + } + + async mineIfSlotFilled() { + try { + const currentSlot = await this.rollup.read.getCurrentSlot(); + const pendingBlockNumber = BigInt(await this.rollup.read.pendingBlockCount()) - 1n; + const [, , lastSlotNumber] = await this.rollup.read.blocks([pendingBlockNumber]); + + if (currentSlot === lastSlotNumber) { + // We should jump to the next slot + const timestamp = await this.rollup.read.getTimestampForSlot([currentSlot + 1n]); + try { + await this.cheatcodes.warp(Number(timestamp)); + } catch (e) { + this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`); + } + + this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`); + } + } catch (err) { + this.logger.error('mineIfSlotFilled failed', err); + } + } +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 7f034f51296..2d9a5029351 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -63,7 +63,6 @@ export type EnvVar = | 'SEQ_PUBLISHER_PRIVATE_KEY' | 'SEQ_REQUIRED_CONFIRMATIONS' | 'SEQ_PUBLISH_RETRY_INTERVAL_MS' - | 'TIME_TRAVELER' | 'VERSION' | 'SEQ_DISABLED' | 'PROVER_DISABLED' diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 
b31fdd3b251..bfe5eb42943 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -1,5 +1,5 @@ import { type L1ReaderConfig, NULL_KEY } from '@aztec/ethereum'; -import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; /** * The configuration of the rollup transaction publisher. @@ -24,11 +24,6 @@ export interface PublisherConfig { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; - /** - * Whether the publisher is a time traveler and can warp the underlying chain - * @todo #8153 - Remove this flag once the time traveler is removed - */ - timeTraveler: boolean; } export const getTxSenderConfigMappings: ( @@ -69,11 +64,6 @@ export const getPublisherConfigMappings: (scope: 'PROVER' | 'SEQ') => ConfigMapp defaultValue: 1000, description: 'The interval to wait between publish retries.', }, - timeTraveler: { - env: `TIME_TRAVELER`, - description: 'Whether the publisher is a time traveler and can warp the underlying chain', - ...booleanConfigHelper(), - }, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 33e0dcc7325..5fbce74c286 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,4 +1,4 @@ -import { L2Block } from '@aztec/circuit-types'; +import { L2Block, type ViemSignature } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; import { sleep } from '@aztec/foundation/sleep'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -27,7 +27,7 @@ class MockAvailabilityOracle { interface MockPublicClient { getTransactionReceipt: ({ hash }: { hash: '0x${string}' }) => Promise; - getBlock(): Promise<{ timestamp: number }>; + getBlock(): Promise<{ timestamp: bigint }>; getTransaction: ({ hash }: { hash: '0x${string}' }) => Promise<{ input: `0x${string}`; hash: `0x${string}` }>; } @@ -46,6 +46,15 @@ interface MockRollupContractWrite { interface MockRollupContractRead { archive: () => Promise<`0x${string}`>; getCurrentSlot(): Promise; + validateHeader: ( + args: readonly [ + `0x${string}`, + ViemSignature[], + `0x${string}`, + bigint, + { ignoreDA: boolean; ignoreSignatures: boolean }, + ], + ) => Promise; } class MockRollupContract { @@ -131,7 +140,6 @@ describe('L1Publisher', () => { rollupAddress: EthAddress.ZERO.toString(), }, l1PublishRetryIntervalMS: 1, - timeTraveller: false, } as unknown as TxSenderConfig & PublisherConfig; publisher = new L1Publisher(config, new NoopTelemetryClient()); @@ -143,6 +151,7 @@ describe('L1Publisher', () => { account = (publisher as any)['account']; rollupContractRead.getCurrentSlot.mockResolvedValue(l2Block.header.globalVariables.slotNumber.toBigInt()); + publicClient.getBlock.mockResolvedValue({ timestamp: 12n }); }); it('publishes and process l2 block to l1', async () => { @@ -161,7 +170,9 @@ describe('L1Publisher', () => { `0x${blockHash.toString('hex')}`, `0x${body.toString('hex')}`, ] as const; - expect(rollupContractSimulate.publishAndProcess).toHaveBeenCalledWith(args, { account: account }); + if (!L1Publisher.SKIP_SIMULATION) { + 
expect(rollupContractSimulate.publishAndProcess).toHaveBeenCalledWith(args, { account: account }); + } expect(rollupContractWrite.publishAndProcess).toHaveBeenCalledWith(args, { account: account }); expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: publishAndProcessTxHash }); }); @@ -181,7 +192,9 @@ describe('L1Publisher', () => { `0x${archive.toString('hex')}`, `0x${blockHash.toString('hex')}`, ] as const; - expect(rollupContractSimulate.process).toHaveBeenCalledWith(args, { account }); + if (!L1Publisher.SKIP_SIMULATION) { + expect(rollupContractSimulate.process).toHaveBeenCalledWith(args, { account }); + } expect(rollupContractWrite.process).toHaveBeenCalledWith(args, { account }); expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: processTxHash }); }); @@ -208,10 +221,18 @@ describe('L1Publisher', () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractSimulate.publishAndProcess.mockRejectedValueOnce(new Error()); + if (L1Publisher.SKIP_SIMULATION) { + rollupContractRead.validateHeader.mockRejectedValueOnce(new Error('Test error')); + } + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(false); - expect(rollupContractSimulate.publishAndProcess).toHaveBeenCalledTimes(1); + if (!L1Publisher.SKIP_SIMULATION) { + expect(rollupContractSimulate.publishAndProcess).toHaveBeenCalledTimes(1); + } + expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); + expect(rollupContractWrite.publishAndProcess).toHaveBeenCalledTimes(0); }); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 9ec67f80d94..9bb8316dd08 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,14 +1,6 @@ -import { EthCheatCodes } from '@aztec/aztec.js'; import { type L2Block, type Signature } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; -import { - AZTEC_SLOT_DURATION, - ETHEREUM_SLOT_DURATION, - EthAddress, - GENESIS_ARCHIVE_ROOT, - type Header, - type Proof, -} from '@aztec/circuits.js'; +import { ETHEREUM_SLOT_DURATION, EthAddress, GENESIS_ARCHIVE_ROOT, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -103,15 +95,6 @@ export type MetadataForSlot = { archive: Buffer; }; -// @note If we want to simulate in the future, we have to skip the viem simulations and use `reads` instead -// This is because the viem simulations are not able to simulate the future, only the current state. -// This means that we will be simulating as if `block.timestamp` is the same for the next block -// as for the last block. -// Nevertheless, it can be quite useful for figuring out why exactly the transaction is failing -// as a middle ground right now, we will be skipping the simulation and just sending the transaction -// but only after we have done a successful run of the `validateHeader` for the timestamp in the future. -const SKIP_SIMULATION = true; - /** * Publishes L2 blocks to L1. This implementation does *not* retry a transaction in * the event of network congestion, but should work for local development. 
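(The removed note above is the rationale for skipping viem simulation: `simulate` can only run against the timestamp of the last L1 block, so the publisher instead validates the header via the new `validateHeader` read call at the timestamp expected for the next block, and only then sends the transaction unsimulated. A minimal sketch of that flow, assuming the `publicClient`, `rollupContract`, `account`, the imported `ETHEREUM_SLOT_DURATION` constant, and the encoded `header`/`digest`/`attestations`/`args` values that `L1Publisher` already holds:)

const ts = BigInt((await publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION));
const flags = { ignoreDA: false, ignoreSignatures: false };
// The read call reverts if the header would be rejected at time `ts`, so we can bail out before spending gas.
await rollupContract.read.validateHeader(
  [
    `0x${header.toString('hex')}`,
    attestations.map(a => a.toViemSignature()),
    `0x${digest.toString('hex')}`,
    ts,
    flags,
  ],
  { account },
);
// No viem `simulate` step: send directly once the future-timestamp validation has passed.
const txHash = await rollupContract.write.publishAndProcess(args, { account });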
@@ -121,9 +104,17 @@ const SKIP_SIMULATION = true; * Adapted from https://github.com/AztecProtocol/aztec2-internal/blob/master/falafel/src/rollup_publisher.ts. */ export class L1Publisher { + // @note If we want to simulate in the future, we have to skip the viem simulations and use `reads` instead + // This is because the viem simulations are not able to simulate the future, only the current state. + // This means that we will be simulating as if `block.timestamp` is the same for the next block + // as for the last block. + // Nevertheless, it can be quite useful for figuring out why exactly the transaction is failing + // as a middle ground right now, we will be skipping the simulation and just sending the transaction + // but only after we have done a successful run of the `validateHeader` for the timestamp in the future. + public static SKIP_SIMULATION = true; + private interruptibleSleep = new InterruptibleSleep(); private sleepTimeMs: number; - private timeTraveler: boolean; private interrupted = false; private metrics: L1PublisherMetrics; private log = createDebugLogger('aztec:sequencer:publisher'); @@ -139,11 +130,8 @@ export class L1Publisher { private publicClient: PublicClient; private account: PrivateKeyAccount; - private ethCheatCodes: EthCheatCodes; - constructor(config: TxSenderConfig & PublisherConfig, client: TelemetryClient) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; - this.timeTraveler = config?.timeTraveler ?? false; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -170,22 +158,21 @@ export class L1Publisher { abi: RollupAbi, client: walletClient, }); - - this.ethCheatCodes = new EthCheatCodes(rpcUrl); - } - - public async amIAValidator(): Promise { - return await this.rollupContract.read.isValidator([this.account.address]); - } - - public async getValidatorCount(): Promise { - return BigInt(await this.rollupContract.read.getValidatorCount()); } public getSenderAddress(): Promise { return Promise.resolve(EthAddress.fromString(this.account.address)); } + /** + * @notice Calls `canProposeAtTime` with the time of the next Ethereum block and the sender address + * + * @dev Throws if unable to propose + * + * @param archive - The archive that we expect to be current state + * @return slot - The L2 slot number of the next Ethereum block, + * @return blockNumber - The L2 block number of the next L2 block + */ public async canProposeAtNextEthBlock(archive: Buffer): Promise<[bigint, bigint]> { const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); const [slot, blockNumber] = await this.rollupContract.read.canProposeAtTime([ @@ -200,7 +187,7 @@ export class L1Publisher { header: Header, digest: Buffer = new Fr(GENESIS_ARCHIVE_ROOT).toBuffer(), attestations: Signature[] = [], - ): Promise { + ): Promise { const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); const formattedAttestations = attestations.map(attest => attest.toViemSignature()); @@ -216,6 +203,7 @@ export class L1Publisher { try { await this.rollupContract.read.validateHeader(args, { account: this.account }); + return true; } catch (error: unknown) { // Specify the type of error if (error instanceof ContractFunctionRevertedError) { @@ -224,7 +212,7 @@ export class L1Publisher { } else { this.log.debug(`Unexpected error during validation: ${error}`); } - throw error; // Re-throw the error if needed + 
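+        // Returning false here (rather than re-throwing) turns validation into a boolean gate; the
+        // sequencer now checks this result before building, before proposing and before publishing.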
return false; } } @@ -287,7 +275,9 @@ export class L1Publisher { // This means that we can avoid the simulation issues in later checks. // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. - await this.validateBlockForSubmission(block.header, block.archive.root.toBuffer(), attestations); + if (!(await this.validateBlockForSubmission(block.header, block.archive.root.toBuffer(), attestations))) { + return false; + } const processTxArgs = { header: block.header.toBuffer(), @@ -332,8 +322,6 @@ export class L1Publisher { this.log.info(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); - await this.commitTimeJump(block.header.globalVariables.slotNumber.toBigInt() + 1n); - return true; } @@ -347,49 +335,6 @@ export class L1Publisher { return false; } - async commitTimeJump(slot: bigint) { - // @note So, we are cheating a bit here. Since the tests are running anvil auto-mine - // no blocks are coming around unless we do something, and since we cannot push - // more blocks within the same slot (when `IS_DEV_NET = false`), we can just - // fast forward the anvil chain such that the next block will be the one we need. - // this means that we are forwarding to time of the next slot - 12 seconds such that - // the NEXT ethereum block will be within the new slot. - // If the slot duration of L2 is just 1 L1 slot, then this will not do anything, as - // the time to jump to is current time. - // - // Time jumps only allowed into the future. - // - // If this is run on top of a real chain, the time lords will catch you. - // - // @todo #8153 - - if (!this.timeTraveler) { - return; - } - - // If the aztec slot duration is same length as the ethereum slot duration, we don't need to do anything - if ((ETHEREUM_SLOT_DURATION as number) === (AZTEC_SLOT_DURATION as number)) { - return; - } - - const [currentTime, timeStampForSlot] = await Promise.all([ - this.ethCheatCodes.timestamp(), - this.rollupContract.read.getTimestampForSlot([slot]), - ]); - - // @note We progress the time to the next slot AND mine the block. - // This means that the next effective block will be ETHEREUM_SLOT_DURATION after that. - // This will cause issues if slot duration is equal to one (1) L1 slot, and sequencer selection is run - // The reason is that simulations on ANVIL cannot be run with timestamp + x, so we need to "BE" there. 
- // @todo #8110 - const timestamp = timeStampForSlot; // - BigInt(ETHEREUM_SLOT_DURATION); - - if (timestamp > currentTime) { - this.log.info(`Committing time jump to slot ${slot}`); - await this.ethCheatCodes.warp(Number(timestamp)); - } - } - public async submitProof( header: Header, archiveRoot: Fr, @@ -472,7 +417,7 @@ export class L1Publisher { `0x${proof.toString('hex')}`, ] as const; - if (!SKIP_SIMULATION) { + if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.submitBlockRootProof(args, { account: this.account, }); @@ -520,7 +465,7 @@ export class L1Publisher { attestations, ] as const; - if (!SKIP_SIMULATION) { + if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.process(args, { account: this.account }); } @@ -534,7 +479,7 @@ export class L1Publisher { `0x${encodedData.blockHash.toString('hex')}`, ] as const; - if (!SKIP_SIMULATION) { + if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.process(args, { account: this.account }); } return await this.rollupContract.write.process(args, { @@ -561,7 +506,7 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - if (!SKIP_SIMULATION) { + if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.publishAndProcess(args, { account: this.account, }); @@ -578,7 +523,7 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - if (!SKIP_SIMULATION) { + if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.publishAndProcess(args, { account: this.account, }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 9ee5c089474..24a52633326 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -41,7 +41,7 @@ import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { type BlockBuilderFactory } from '../block_builder/index.js'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; -import { type L1Publisher, type MetadataForSlot } from '../publisher/l1-publisher.js'; +import { type L1Publisher } from '../publisher/l1-publisher.js'; import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { Sequencer } from './sequencer.js'; @@ -89,24 +89,19 @@ describe('sequencer', () => { }; let block: L2Block; - let metadata: MetadataForSlot; beforeEach(() => { lastBlockNumber = 0; block = L2Block.random(lastBlockNumber + 1); - metadata = { - proposer: EthAddress.ZERO, - slot: block.header.globalVariables.slotNumber.toBigInt(), - pendingBlockNumber: BigInt(lastBlockNumber), - archive: block.header.lastArchive.toBuffer(), - }; - publisher = mock(); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); - publisher.getMetadataForSlotAtNextEthBlock.mockResolvedValue(metadata); - publisher.getValidatorCount.mockResolvedValue(0n); + publisher.canProposeAtNextEthBlock.mockResolvedValue([ + block.header.globalVariables.slotNumber.toBigInt(), + block.header.globalVariables.blockNumber.toBigInt(), + ]); + publisher.validateBlockForSubmission.mockResolvedValue(true); globalVariableBuilder = mock(); merkleTreeOps = mock(); @@ -212,6 +207,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); + // Ok, we have an issue that we never actually call the process L2 block 
expect(publisher.processL2Block).toHaveBeenCalledTimes(1); expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); @@ -227,7 +223,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockReturnValueOnce([tx]); + p2p.getTxs.mockReturnValue([tx]); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -243,21 +239,28 @@ describe('sequencer', () => { gasFees, ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); // Not your turn! - const publisherAddress = EthAddress.random(); - publisher.getSenderAddress.mockResolvedValue(publisherAddress); - publisher.getMetadataForSlotAtNextEthBlock.mockResolvedValue({ ...metadata, proposer: EthAddress.random() }); - // Specify that there is a validator, such that we don't allow everyone to publish - publisher.getValidatorCount.mockResolvedValueOnce(1n); + publisher.canProposeAtNextEthBlock.mockRejectedValue(new Error()); + publisher.validateBlockForSubmission.mockResolvedValue(false); await sequencer.initialSync(); await sequencer.work(); expect(blockSimulator.startNewBlock).not.toHaveBeenCalled(); + // Now we can propose, but lets assume that the content is still "bad" (missing sigs etc) + publisher.canProposeAtNextEthBlock.mockResolvedValue([ + block.header.globalVariables.slotNumber.toBigInt(), + block.header.globalVariables.blockNumber.toBigInt(), + ]); + + await sequencer.work(); + expect(blockSimulator.startNewBlock).not.toHaveBeenCalled(); + // Now it is! - publisher.getMetadataForSlotAtNextEthBlock.mockResolvedValue({ ...metadata, proposer: publisherAddress }); + publisher.validateBlockForSubmission.mockClear(); + publisher.validateBlockForSubmission.mockResolvedValue(true); await sequencer.work(); expect(blockSimulator.startNewBlock).toHaveBeenCalledWith( @@ -627,13 +630,11 @@ describe('sequencer', () => { await sequencer.initialSync(); - l2BlockSource.getBlockNumber - // let it work for a bit - .mockResolvedValueOnce(lastBlockNumber) - .mockResolvedValueOnce(lastBlockNumber) - .mockResolvedValueOnce(lastBlockNumber) - // then tell it to abort - .mockResolvedValue(lastBlockNumber + 1); + // This could practically be for any reason, e.g., could also be that we have entered a new slot. 
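+      // The three mocked results below line up with the three validation points the sequencer now
+      // has (before building, after processing txs, and after finalising), so resolving `false` on
+      // the third call makes it abandon the block instead of publishing it.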
+ publisher.validateBlockForSubmission + .mockResolvedValueOnce(true) + .mockResolvedValueOnce(true) + .mockResolvedValueOnce(false); await sequencer.work(); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 122df289b8d..2b8fa1ac1bf 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -389,6 +389,10 @@ export class Sequencer { proposalHeader: Header, historicalHeader: Header | undefined, ): Promise { + if (!(await this.publisher.validateBlockForSubmission(proposalHeader))) { + return; + } + const newGlobalVariables = proposalHeader.globalVariables; this.metrics.recordNewBlock(newGlobalVariables.blockNumber.toNumber(), validTxs.length); @@ -422,8 +426,8 @@ export class Sequencer { await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); } - await this.publisher.validateBlockForSubmission(proposalHeader); if ( + !(await this.publisher.validateBlockForSubmission(proposalHeader)) || !this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length, processedTxsCount: processedTxs.length, @@ -447,7 +451,9 @@ export class Sequencer { // Block is ready, now finalise const { block } = await blockBuilder.finaliseBlock(); - await this.publisher.validateBlockForSubmission(block.header); + if (!(await this.publisher.validateBlockForSubmission(block.header))) { + return; + } const workDuration = workTimer.ms(); this.log.verbose( From 33889666567cbfe418822998c6d8b53bfbc825ea Mon Sep 17 00:00:00 2001 From: LHerskind Date: Wed, 28 Aug 2024 20:19:42 +0000 Subject: [PATCH 003/123] feat: revert if timestamp in future --- l1-contracts/src/core/Rollup.sol | 54 +++++++++++++--------- l1-contracts/src/core/libraries/Errors.sol | 2 +- l1-contracts/test/Rollup.t.sol | 16 +++++++ 3 files changed, 50 insertions(+), 22 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 02a3bdda097..c15862dfe46 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -444,31 +444,10 @@ contract Rollup is Leonidas, IRollup, ITestRollup { uint256 _currentTime, DataStructures.ExecutionFlags memory _flags ) external view override(IRollup) { - // @note This is a convenience function to allow for easier testing of the header validation - // without having to go through the entire process of submitting a block. - // This is not used in production code. - HeaderLib.Header memory header = HeaderLib.decode(_header); _validateHeader(header, _signatures, _digest, _currentTime, _flags); } - function _validateHeader( - HeaderLib.Header memory _header, - SignatureLib.Signature[] memory _signatures, - bytes32 _digest, - uint256 _currentTime, - DataStructures.ExecutionFlags memory _flags - ) internal view { - // @note This is a convenience function to allow for easier testing of the header validation - // without having to go through the entire process of submitting a block. - // This is not used in production code. 
- - _validateHeaderForSubmissionBase(_header, _currentTime, _flags); - _validateHeaderForSubmissionSequencerSelection( - _header.globalVariables.slotNumber, _signatures, _digest, _currentTime, _flags - ); - } - /** * @notice Processes an incoming L2 block with signatures * @@ -589,6 +568,29 @@ contract Rollup is Leonidas, IRollup, ITestRollup { } } + /** + * @notice Validates the header for submission + * + * @param _header - The proposed block header + * @param _signatures - The signatures for the attestations + * @param _digest - The digest that signatures signed + * @param _currentTime - The time of execution + * @dev - This value is provided to allow for simple simulation of future + * @param _flags - Flags specific to the execution, whether certain checks should be skipped + */ + function _validateHeader( + HeaderLib.Header memory _header, + SignatureLib.Signature[] memory _signatures, + bytes32 _digest, + uint256 _currentTime, + DataStructures.ExecutionFlags memory _flags + ) internal view { + _validateHeaderForSubmissionBase(_header, _currentTime, _flags); + _validateHeaderForSubmissionSequencerSelection( + _header.globalVariables.slotNumber, _signatures, _digest, _currentTime, _flags + ); + } + /** * @notice Validate a header for submission to the pending chain (sequencer selection checks) * @@ -703,6 +705,16 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__InvalidTimestamp(timestamp, _header.globalVariables.timestamp); } + if (timestamp > _currentTime) { + // @note If you are hitting this error, it is likely because the chain you use have a blocktime that differs + // from the value that we have in the constants. + // When you are encountering this, it will likely be as the sequencer expects to be able to include + // an Aztec block in the "next" ethereum block based on a timestamp that is 12 seconds in the future + // from the last block. However, if the actual will only be 1 second in the future, you will end up + // expecting this value to be in the future. + revert Errors.Rollup__TimestampInFuture(_currentTime, timestamp); + } + // Check if the data is available using availability oracle (change availability oracle if you want a different DA layer) if ( !_flags.ignoreDA && !AVAILABILITY_ORACLE.isAvailable(_header.contentCommitment.txsEffectsHash) diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index 257d7695707..b7ff5d9b4e6 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -56,7 +56,7 @@ library Errors { error Rollup__InvalidChainId(uint256 expected, uint256 actual); // 0x37b5bc12 error Rollup__InvalidVersion(uint256 expected, uint256 actual); // 0x9ef30794 error Rollup__InvalidTimestamp(uint256 expected, uint256 actual); // 0x3132e895 - error Rollup__TimestampInFuture(); // 0xbc1ce916 + error Rollup__TimestampInFuture(uint256 max, uint256 actual); // 0x89f30690 error Rollup__TimestampTooOld(); // 0x72ed9c81 error Rollup__UnavailableTxs(bytes32 txsHash); // 0x414906c3 error Rollup__NothingToPrune(); // 0x850defd3 diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index cd7de61110e..07c67385a80 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -78,6 +78,19 @@ contract RollupTest is DecoderBase { _; } + function testTimestamp() public setUpFor("mixed_block_1") { + // Ensure that the timestamp of the current slot is never in the future. 
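+    // This complements the Rollup__TimestampInFuture revert added below: if getTimestampForSlot could
+    // run ahead of block.timestamp, an honest proposer submitting in the current slot would trip it.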
+ for (uint256 i = 0; i < 100; i++) { + uint256 slot = rollup.getCurrentSlot(); + uint256 ts = rollup.getTimestampForSlot(slot); + + assertLe(ts, block.timestamp, "Invalid timestamp"); + + vm.warp(block.timestamp + 12); + vm.roll(block.number + 1); + } + } + function testRevertPrune() public setUpFor("mixed_block_1") { if (rollup.isDevNet()) { vm.expectRevert(abi.encodeWithSelector(Errors.DevNet__NoPruningAllowed.selector)); @@ -182,6 +195,9 @@ contract RollupTest is DecoderBase { uint256 portalBalance = portalERC20.balanceOf(address(feeJuicePortal)); + // We jump to the time of the block. (unless it is in the past) + vm.warp(max(block.timestamp, data.decodedHeader.globalVariables.timestamp)); + vm.expectRevert( abi.encodeWithSelector( IERC20Errors.ERC20InsufficientBalance.selector, From 13a60a3d7c9a4035b08f2aa68ff90d3fa1d01a07 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 29 Aug 2024 15:07:50 +0000 Subject: [PATCH 004/123] feat: including txhashes explicitly in the rollup attestations --- l1-contracts/src/core/Rollup.sol | 12 +++++++++--- l1-contracts/src/core/interfaces/IRollup.sol | 2 ++ l1-contracts/src/core/libraries/ConstantsGen.sol | 2 +- l1-contracts/test/sparta/Sparta.t.sol | 7 ++++++- .../crates/types/src/constants.nr | 2 +- .../circuit-types/src/p2p/block_proposal.ts | 8 +++++++- yarn-project/circuits.js/src/constants.gen.ts | 2 +- .../end-to-end/src/e2e_p2p_network.test.ts | 6 ++++-- yarn-project/end-to-end/src/fixtures/utils.ts | 1 + .../src/publisher/l1-publisher.ts | 16 ++++++++++++++-- .../sequencer-client/src/sequencer/sequencer.ts | 16 +++++++++++----- .../src/duties/validation_service.ts | 10 +++++++--- 12 files changed, 64 insertions(+), 20 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index c15862dfe46..7a78f79057f 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -208,11 +208,12 @@ contract Rollup is Leonidas, IRollup, ITestRollup { bytes calldata _header, bytes32 _archive, bytes32 _blockHash, + bytes32[] memory _txHashes, SignatureLib.Signature[] memory _signatures, bytes calldata _body ) external override(IRollup) { AVAILABILITY_ORACLE.publish(_body); - process(_header, _archive, _blockHash, _signatures); + process(_header, _archive, _blockHash, _txHashes, _signatures); } /** @@ -460,15 +461,19 @@ contract Rollup is Leonidas, IRollup, ITestRollup { bytes calldata _header, bytes32 _archive, bytes32 _blockHash, + bytes32[] memory _txHashes, SignatureLib.Signature[] memory _signatures ) public override(IRollup) { // Decode and validate header HeaderLib.Header memory header = HeaderLib.decode(_header); setupEpoch(); + + // TODO: make function for this + bytes32 digest = keccak256(abi.encodePacked(_archive, _txHashes)); _validateHeader({ _header: header, _signatures: _signatures, - _digest: _archive, + _digest: digest, _currentTime: block.timestamp, _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) }); @@ -525,7 +530,8 @@ contract Rollup is Leonidas, IRollup, ITestRollup { override(IRollup) { SignatureLib.Signature[] memory emptySignatures = new SignatureLib.Signature[](0); - process(_header, _archive, _blockHash, emptySignatures); + bytes32[] memory emptyTxHashes = new bytes32[](0); + process(_header, _archive, _blockHash, emptyTxHashes, emptySignatures); } /** diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 4e129e052b5..0b571ca6c39 100644 --- 
a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -43,6 +43,7 @@ interface IRollup { bytes calldata _header, bytes32 _archive, bytes32 _blockHash, + bytes32[] memory _txHashes, SignatureLib.Signature[] memory _signatures, bytes calldata _body ) external; @@ -57,6 +58,7 @@ interface IRollup { bytes calldata _header, bytes32 _archive, bytes32 _blockHash, + bytes32[] memory _txHashes, SignatureLib.Signature[] memory _signatures ) external; diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 2e04e93dfe2..a1e9567a747 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -104,7 +104,7 @@ library Constants { uint256 internal constant ETHEREUM_SLOT_DURATION = 12; uint256 internal constant AZTEC_SLOT_DURATION = 12; uint256 internal constant AZTEC_EPOCH_DURATION = 48; - uint256 internal constant IS_DEV_NET = 1; + uint256 internal constant IS_DEV_NET = 0; uint256 internal constant GENESIS_ARCHIVE_ROOT = 8142738430000951296386584486068033372964809139261822027365426310856631083550; uint256 internal constant FEE_JUICE_INITIAL_MINT = 20000000000; diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 028533839d2..b8c8af4079d 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -165,6 +165,8 @@ contract SpartaTest is DecoderBase { DecoderBase.Full memory full = load(_name); bytes memory header = full.block.header; bytes32 archive = full.block.archive; + // TODO(md): get the tx hashes from the block - but we do not have them now + // Come back to this bytes memory body = full.block.body; StructToAvoidDeepStacks memory ree; @@ -217,8 +219,11 @@ contract SpartaTest is DecoderBase { ree.shouldRevert = true; } + // TODO(md): this is temp and probably will not pass + bytes32[] memory txHashes = new bytes32[](0); + vm.prank(ree.proposer); - rollup.process(header, archive, bytes32(0), signatures); + rollup.process(header, archive, bytes32(0), txHashes, signatures); if (ree.shouldRevert) { return; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 2367626358b..398db5d31b7 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -137,7 +137,7 @@ global ETHEREUM_SLOT_DURATION: u32 = 12; // AZTEC_SLOT_DURATION should be a multiple of ETHEREUM_SLOT_DURATION global AZTEC_SLOT_DURATION: u32 = ETHEREUM_SLOT_DURATION * 1; global AZTEC_EPOCH_DURATION: u32 = 48; -global IS_DEV_NET: bool = true; +global IS_DEV_NET: bool = false; // The following is taken from building a block and looking at the `lastArchive` value in it. // You can run the `integration_l1_publisher.test.ts` and look at the first blocks in the fixtures. 
global GENESIS_ARCHIVE_ROOT: Field = 0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e; diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 5c77de9b5e9..0d8c7514b08 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -36,15 +36,17 @@ export class BlockProposal extends Gossipable { /** The sequence of transactions in the block */ public readonly txs: TxHash[], /** The signer of the BlockProposal over the header of the new block*/ - public readonly signature: Signature, + public readonly signature: Signature ) { super(); } + static { this.p2pTopic = createTopicString(TopicType.block_proposal); } + override p2pMessageIdentifier(): Buffer32 { return BlockProposalHash.fromField(this.archive); } @@ -66,6 +68,10 @@ export class BlockProposal extends Gossipable { return this.sender; } + getPayload() { + return serializeToBuffer([this.archive, this.txs]); + } + toBuffer(): Buffer { return serializeToBuffer([this.header, this.archive, this.txs.length, this.txs, this.signature]); } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 1b25acd8207..98f38ea7a4e 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -90,7 +90,7 @@ export const BLOB_SIZE_IN_BYTES = 126976; export const ETHEREUM_SLOT_DURATION = 12; export const AZTEC_SLOT_DURATION = 12; export const AZTEC_EPOCH_DURATION = 48; -export const IS_DEV_NET = 1; +export const IS_DEV_NET = 0; export const GENESIS_ARCHIVE_ROOT = 8142738430000951296386584486068033372964809139261822027365426310856631083550n; export const FEE_JUICE_INITIAL_MINT = 20000000000; export const MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 20000; diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index f297b084b47..ad494f2668f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -35,7 +35,7 @@ describe('e2e_p2p_network', () => { beforeEach(async () => { // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` - const useLocalHost = false; + const useLocalHost = true; if (useLocalHost) { jest.setTimeout(300_000); } @@ -49,7 +49,9 @@ describe('e2e_p2p_network', () => { // Add 1 extra validator if in devnet or NUM_NODES if not. // Each of these will become a validator and sign attestations. + console.log('IS_DEV_NET', IS_DEV_NET); const limit = IS_DEV_NET ? 
1 : NUM_NODES; + console.log('limit', limit); for (let i = 0; i < limit; i++) { const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); initialValidators.push(EthAddress.fromString(account.address)); @@ -72,7 +74,7 @@ describe('e2e_p2p_network', () => { } }); - it('should rollup txs from all peers', async () => { + it.only('should rollup txs from all peers', async () => { // create the bootstrap node for the network if (!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 7fb9bf3f5b1..3e3e94b2b1a 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -461,6 +461,7 @@ export async function setup( const wallets = numberOfAccounts > 0 ? await createAccounts(pxe, numberOfAccounts) : []; const cheatCodes = CheatCodes.create(config.l1RpcUrl, pxe!); + console.log('made account'); const teardown = async () => { if (aztecNode instanceof AztecNodeService) { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 9bb8316dd08..aa1cb70b129 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,4 +1,4 @@ -import { type L2Block, type Signature } from '@aztec/circuit-types'; +import { type L2Block, type Signature, TxHash } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, GENESIS_ARCHIVE_ROOT, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; @@ -70,6 +70,8 @@ export type L1ProcessArgs = { blockHash: Buffer; /** L2 block body. */ body: Buffer; + /** L2 block tx hashes */ + txHashes: TxHash[]; /** Attestations */ attestations?: Signature[]; }; @@ -228,6 +230,11 @@ export class L1Publisher { this.rollupContract.read.archive(), ]); + console.log('metadata for the next eth block'); + console.table({ + slot, + pendingBlockNumber: pendingBlockCount - 1n, + }); return { proposer: EthAddress.fromString(submitter), slot, @@ -264,7 +271,7 @@ export class L1Publisher { * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ - public async processL2Block(block: L2Block, attestations?: Signature[]): Promise { + public async processL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { const ctx = { blockNumber: block.number, slotNumber: block.header.globalVariables.slotNumber.toBigInt(), @@ -285,6 +292,7 @@ export class L1Publisher { blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), attestations, + txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? 
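+      // Each tx hash is a 32-byte value; sendProcessTx/sendPublishAndProcessTx map these through
+      // `to0xString()` onto the new `bytes32[] _txHashes` parameter, and the rollup hashes them with
+      // the archive (keccak256(abi.encodePacked(_archive, _txHashes))) into the digest the attestations
+      // must sign over.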
}; // Process block and publish the body if needed (if not already published) @@ -458,10 +466,12 @@ export class L1Publisher { try { if (encodedData.attestations) { const attestations = encodedData.attestations.map(attest => attest.toViemSignature()); + const txHashes = encodedData.txHashes.map(txHash => txHash.to0xString()); const args = [ `0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`, `0x${encodedData.blockHash.toString('hex')}`, + txHashes, attestations, ] as const; @@ -498,10 +508,12 @@ export class L1Publisher { try { if (encodedData.attestations) { const attestations = encodedData.attestations.map(attest => attest.toViemSignature()); + const txHashes = encodedData.txHashes.map(txHash => txHash.to0xString()); const args = [ `0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`, `0x${encodedData.blockHash.toString('hex')}`, + txHashes, attestations, `0x${encodedData.body.toString('hex')}`, ] as const; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 2b8fa1ac1bf..8e938fd4cb2 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -6,6 +6,7 @@ import { type ProcessedTx, Signature, Tx, + TxHash, type TxValidator, } from '@aztec/circuit-types'; import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; @@ -473,8 +474,11 @@ export class Sequencer { this.log.verbose(`Flushing completed`); } + const txHashes = validTxs.map(tx => tx.getTxHash()); + this.isFlushing = false; - const attestations = await this.collectAttestations(block); + this.log.verbose('SEQUENCER | collectAttestations'); + const attestations = await this.collectAttestations(block, txHashes); try { await this.publishL2Block(block, attestations); @@ -495,7 +499,7 @@ export class Sequencer { this.isFlushing = true; } - protected async collectAttestations(block: L2Block): Promise { + protected async collectAttestations(block: L2Block, txHashes: TxHash[]): Promise { // @todo This should collect attestations properly and fix the ordering of them to make sense // the current implementation is a PLACEHOLDER and should be nuked from orbit. 
// It is assuming that there will only be ONE (1) validator, so only one attestation @@ -510,6 +514,8 @@ export class Sequencer { // / \ // _____________/_ __ \_____________ + console.log('SEQUENCER | IS_DEV_NET', IS_DEV_NET); + console.log('SEQUENCER | this.validatorClient', this.validatorClient); if (IS_DEV_NET || !this.validatorClient) { return undefined; } @@ -526,7 +532,7 @@ export class Sequencer { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7974): we do not have transaction[] lists in the block for now // Dont do anything with the proposals for now - just collect them - const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, []); + const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); this.state = SequencerState.PUBLISHING_BLOCK_TO_PEERS; this.validatorClient.broadcastBlockProposal(proposal); @@ -545,11 +551,11 @@ export class Sequencer { @trackSpan('Sequencer.publishL2Block', block => ({ [Attributes.BLOCK_NUMBER]: block.number, })) - protected async publishL2Block(block: L2Block, attestations?: Signature[]) { + protected async publishL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await this.publisher.processL2Block(block, attestations); + const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); if (publishedL2Block) { this.lastPublishedBlock = block.number; } else { diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 870f82d9520..da04ce8ac60 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -3,6 +3,7 @@ import { type Header } from '@aztec/circuits.js'; import { type Fr } from '@aztec/foundation/fields'; import { type ValidatorKeyStore } from '../key_store/interface.js'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; export class ValidationService { constructor(private keyStore: ValidatorKeyStore) {} @@ -18,8 +19,8 @@ export class ValidationService { */ async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { // Note: just signing the archive for now - const archiveBuf = archive.toBuffer(); - const sig = await this.keyStore.sign(archiveBuf); + const payload = serializeToBuffer([archive, txs]); + const sig = await this.keyStore.sign(payload); return new BlockProposal(header, archive, txs, sig); } @@ -33,7 +34,10 @@ export class ValidationService { async attestToProposal(proposal: BlockProposal): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct - const buf = proposal.archive.toBuffer(); + // TODO: in the function before this, check that all of the txns exist in the payload + + console.log('attesting to proposal', proposal); + const buf = proposal.getPayload(); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, sig); } From 86026f265e9496ca4d158dfc8d572b4681b7a185 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 30 Aug 2024 10:24:18 +0000 Subject: [PATCH 005/123] temp --- l1-contracts/src/core/Rollup.sol | 1 + .../src/core/sequencer_selection/Leonidas.sol | 33 
++++++++-------- l1-contracts/test/sparta/Sparta.t.sol | 30 +++++++++++++++ .../end-to-end/src/e2e_p2p_network.test.ts | 17 +++++++-- yarn-project/end-to-end/src/fixtures/utils.ts | 2 + yarn-project/foundation/src/config/env_var.ts | 4 +- yarn-project/p2p/src/client/p2p_client.ts | 1 + .../p2p/src/service/libp2p_service.ts | 2 +- .../src/publisher/l1-publisher.ts | 27 ++++++++++--- .../src/sequencer/sequencer.ts | 23 +++++++++-- yarn-project/validator-client/src/config.ts | 19 +++++++++- .../validator-client/src/validator.ts | 38 +++++++++++++++++-- 12 files changed, 160 insertions(+), 37 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 7a78f79057f..eae17d00ba6 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -409,6 +409,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__SlotAlreadyInChain(lastSlot, slot); } + // Make sure that the proposer is up to date bytes32 tipArchive = archive(); if (tipArchive != _archive) { revert Errors.Rollup__InvalidArchive(tipArchive, _archive); diff --git a/l1-contracts/src/core/sequencer_selection/Leonidas.sol b/l1-contracts/src/core/sequencer_selection/Leonidas.sol index 709e6fad153..194c0fc098c 100644 --- a/l1-contracts/src/core/sequencer_selection/Leonidas.sol +++ b/l1-contracts/src/core/sequencer_selection/Leonidas.sol @@ -133,7 +133,7 @@ contract Leonidas is Ownable, ILeonidas { return epochs[_epoch].committee; } - function getCommitteeAt(uint256 _ts) internal view returns (address[] memory) { + function getCommitteeAt(uint256 _ts) public view returns (address[] memory) { uint256 epochNumber = getEpochAt(_ts); if (epochNumber == 0) { return new address[](0); @@ -161,9 +161,6 @@ contract Leonidas is Ownable, ILeonidas { /** * @notice Get the validator set for the current epoch - * - * @dev Makes a call to setupEpoch under the hood, this should ONLY be called as a view function, and not from within - * this contract. * @return The validator set for the current epoch */ function getCurrentEpochCommittee() external view override(ILeonidas) returns (address[] memory) { @@ -306,23 +303,23 @@ contract Leonidas is Ownable, ILeonidas { return address(0); } - Epoch storage epoch = epochs[epochNumber]; + // Epoch storage epoch = epochs[epochNumber]; - // If the epoch is setup, we can just return the proposer. Otherwise we have to emulate sampling - if (epoch.sampleSeed != 0) { - uint256 committeeSize = epoch.committee.length; - if (committeeSize == 0) { - return address(0); - } + // // If the epoch is setup, we can just return the proposer. 
Otherwise we have to emulate sampling + // if (epoch.sampleSeed != 0) { + // uint256 committeeSize = epoch.committee.length; + // if (committeeSize == 0) { + // return address(0); + // } - return - epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; - } + // return + // epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; + // } - // Allow anyone if there is no validator set - if (validatorSet.length() == 0) { - return address(0); - } + // // Allow anyone if there is no validator set + // if (validatorSet.length() == 0) { + // return address(0); + // } // Emulate a sampling of the validators uint256 sampleSeed = _getSampleSeed(epochNumber); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index b8c8af4079d..03131e96103 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -28,6 +28,8 @@ import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; * We will skip these test if we are running with IS_DEV_NET = true */ +import "forge-std/console.sol"; + contract SpartaTest is DecoderBase { using MessageHashUtils for bytes32; @@ -275,6 +277,34 @@ contract SpartaTest is DecoderBase { assertEq(rollup.archive(), archive, "Invalid archive"); } + + // We need to make sure that the get committee function is actually + // working the way we expect it to + // This is blowing up the test that we want to test + function testGetCommitteeAtNonSetupEpoch() public setup(4) { + if (Constants.IS_DEV_NET == 1) { + return; + } + + uint256 _epochsToJump = 1; + uint256 pre = rollup.getCurrentEpoch(); + vm.warp( + block.timestamp + uint256(_epochsToJump) * rollup.EPOCH_DURATION() * rollup.SLOT_DURATION() + ); + uint256 post = rollup.getCurrentEpoch(); + assertEq(pre + _epochsToJump, post, "Invalid epoch"); + + // Test that the committee returned from getCommitteeAt is the same as the one returned from getCurrentEpochCommittee + address[] memory committeeAtNow = rollup.getCommitteeAt(block.timestamp); + address[] memory currentCommittee = rollup.getCurrentEpochCommittee(); + assertEq(currentCommittee.length, 4, "Committee should be empty"); + assertEq(committeeAtNow.length, currentCommittee.length, "Committee now and get committee should be the same length"); + + for (uint256 i = 0; i < currentCommittee.length; i++) { + assertEq(currentCommittee[i], committeeAtNow[i], "Committee now and get committee should be the same length"); + } + } + function _populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { for (uint256 i = 0; i < _contents.length; i++) { vm.prank(_sender); diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index ad494f2668f..c17b92bce54 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -1,7 +1,7 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { CompleteAddress, type DebugLogger, Fr, GrumpkinScalar, type SentTx, TxStatus, sleep } from '@aztec/aztec.js'; -import { EthAddress, IS_DEV_NET } from '@aztec/circuits.js'; +import { CheatCodes, CompleteAddress, type DebugLogger, Fr, GrumpkinScalar, type SentTx, TxStatus, sleep } from '@aztec/aztec.js'; +import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION, EthAddress, IS_DEV_NET } from '@aztec/circuits.js'; 
import { type BootstrapNode } from '@aztec/p2p'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; @@ -32,6 +32,7 @@ describe('e2e_p2p_network', () => { let teardown: () => Promise; let bootstrapNode: BootstrapNode; let bootstrapNodeEnr: string; + let cheatCodes: CheatCodes; beforeEach(async () => { // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` @@ -57,7 +58,7 @@ describe('e2e_p2p_network', () => { initialValidators.push(EthAddress.fromString(account.address)); } - ({ teardown, config, logger } = await setup(0, { initialValidators, ...options })); + ({ teardown, config, logger, cheatCodes } = await setup(0, { initialValidators, ...options })); bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); @@ -74,6 +75,12 @@ describe('e2e_p2p_network', () => { } }); + const jumpIntoNextEpoch = async () => { + const currentTimestamp = await cheatCodes.eth.timestamp(); + const timeJump = currentTimestamp + (AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION); + await cheatCodes.eth.warp(timeJump + 1); + }; + it.only('should rollup txs from all peers', async () => { // create the bootstrap node for the network if (!bootstrapNodeEnr) { @@ -93,8 +100,10 @@ describe('e2e_p2p_network', () => { /*activate validators=*/ !IS_DEV_NET, ); + // Warp an entire epoch ahead. So that the committee exists + await jumpIntoNextEpoch(); // wait a bit for peers to discover each other - await sleep(2000); + await sleep(4000); for (const node of nodes) { const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 3e3e94b2b1a..67152a5635a 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -88,6 +88,7 @@ import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { isMetricsLoggingRequested, setupMetricsLogger } from './logging.js'; import { Watcher } from './watcher.js'; +import { ConsoleMessage } from 'puppeteer'; export { deployAndInitializeTokenAndBridgeContracts } from '../shared/cross_chain_test_harness.js'; @@ -149,6 +150,7 @@ export const setupL1Contracts = async ( }, }; + console.log("Initial validators", args.initialValidators); const l1Data = await deployL1Contracts(l1RpcUrl, account, chain, logger, l1Artifacts, { l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 2d9a5029351..c0b90f3ab01 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -37,7 +37,7 @@ export type EnvVar = | 'P2P_QUERY_FOR_IP' | 'P2P_TX_POOL_KEEP_PROVEN_FOR' | 'TELEMETRY' - | 'OTEL_SERVICE_NAME' + | 'OTEL_SERVICE_NAME' | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' | 'NETWORK_NAME' @@ -105,6 +105,8 @@ export type EnvVar = | 'BOT_FLUSH_SETUP_TRANSACTIONS' | 'VALIDATOR_PRIVATE_KEY' | 'VALIDATOR_DISABLED' + | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' + | 'VALIDATOR_ATTESTATIONS_POOLING_INTERVAL_MS' | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' | 'PROVER_NODE_MAX_PENDING_JOBS' | 'LOG_LEVEL' diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 6af0180251e..eeb7d2e5b14 100644 --- 
a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -263,6 +263,7 @@ export class P2PClient implements P2P { } public broadcastProposal(proposal: BlockProposal): void { + this.log.verbose(`Broadcasting proposal ${proposal.p2pMessageIdentifier()} to peers`); return this.p2pService.propagate(proposal); } diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index bc3a7c55594..bf3a9c1c144 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -316,7 +316,7 @@ export class LibP2PService implements P2PService { this.logger.verbose(`Sending message ${identifier} to peers`); const recipientsNum = await this.publishToTopic(parent.p2pTopic, message.toBuffer()); - this.logger.verbose(`Sent tx ${identifier} to ${recipientsNum} peers`); + this.logger.verbose(`Sent message ${identifier} to ${recipientsNum} peers`); } // Libp2p seems to hang sometimes if new peers are initiating connections. diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index aa1cb70b129..bf72924548c 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -90,6 +90,7 @@ export type L1SubmitProofArgs = { aggregationObject: Buffer; }; + export type MetadataForSlot = { proposer: EthAddress; slot: bigint; @@ -185,6 +186,7 @@ export class L1Publisher { return [slot, blockNumber]; } + public async validateBlockForSubmission( header: Header, digest: Buffer = new Fr(GENESIS_ARCHIVE_ROOT).toBuffer(), @@ -230,11 +232,6 @@ export class L1Publisher { this.rollupContract.read.archive(), ]); - console.log('metadata for the next eth block'); - console.table({ - slot, - pendingBlockNumber: pendingBlockCount - 1n, - }); return { proposer: EthAddress.fromString(submitter), slot, @@ -243,8 +240,28 @@ export class L1Publisher { }; } + + // /** + // * @notice Calls `getCommitteeAt` with the time of the next Ethereum block + // * @return committee - The committee at the next Ethereum block + // */ + // public async getCommitteeAtNextEthBlock(): Promise { + // // TODO(md): reuse as a helper? 
+ // const lastBlockTimestamp = (await this.publicClient.getBlock()).timestamp; + // console.log('lastBlockTimestamp', lastBlockTimestamp); + // const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); + // console.log('ts', ts); + // const committee = await this.rollupContract.read.getCommitteeAt([ + // ts, + // ]); + // console.log('returned committee', committee); + // return committee.map(EthAddress.fromString); + // } + + public async getCurrentEpochCommittee(): Promise { const committee = await this.rollupContract.read.getCurrentEpochCommittee(); + console.log('returned committee', committee); return committee.map(EthAddress.fromString); } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 8e938fd4cb2..2b1658b77a9 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -477,8 +477,9 @@ export class Sequencer { const txHashes = validTxs.map(tx => tx.getTxHash()); this.isFlushing = false; - this.log.verbose('SEQUENCER | collectAttestations'); + this.log.verbose("Collecting attestations"); const attestations = await this.collectAttestations(block, txHashes); + this.log.verbose("Attestations collected"); try { await this.publishL2Block(block, attestations); @@ -514,16 +515,22 @@ export class Sequencer { // / \ // _____________/_ __ \_____________ - console.log('SEQUENCER | IS_DEV_NET', IS_DEV_NET); - console.log('SEQUENCER | this.validatorClient', this.validatorClient); if (IS_DEV_NET || !this.validatorClient) { + this.log.verbose("ATTEST | Skipping attestation collection"); return undefined; } + // const committeeAtNext = await this.publisher.getCommitteeAtNextEthBlock(); + // this.log.verbose(`ATTEST | committee at the next eth block length ${committeeAtNext.length}`); + + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962): inefficient to have a round trip in here - this should be cached const committee = await this.publisher.getCurrentEpochCommittee(); + this.log.verbose(`ATTEST | committee length ${committee.length}`); if (committee.length === 0) { + this.log.verbose(`ATTEST | committee length is 0, skipping`); + throw new Error('Committee length is 0, WHAT THE FUCK'); return undefined; } @@ -532,13 +539,23 @@ export class Sequencer { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7974): we do not have transaction[] lists in the block for now // Dont do anything with the proposals for now - just collect them + this.log.verbose("ATTEST | Creating block proposal"); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); this.state = SequencerState.PUBLISHING_BLOCK_TO_PEERS; + this.log.verbose("Broadcasting block proposal to validators"); this.validatorClient.broadcastBlockProposal(proposal); + // Note do we know if it is wating for attestations as it thinks it should be + // proposing the block? 
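+    // While waiting, the validator client polls its attestation pool every attestationPoolingIntervalMs
+    // and throws after attestationWaitTimeoutMs (both added to ValidatorClientConfig below), so a quiet
+    // committee surfaces as an error here rather than stalling the sequencer indefinitely.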
+ this.state = SequencerState.WAITING_FOR_ATTESTATIONS; const attestations = await this.validatorClient.collectAttestations(proposal, numberOfRequiredAttestations); + this.log.verbose("Collected attestations from validators"); + + // TODO: clean: SELF REPORT LMAO + const selfSign = await this.validatorClient.attestToProposal(proposal); + attestations.push(selfSign); // note: the smart contract requires that the signatures are provided in the order of the committee return await orderAttestations(attestations, committee); diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 083beead6c1..65333c33961 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -1,5 +1,6 @@ +import { AZTEC_SLOT_DURATION } from '@aztec/circuits.js'; import { NULL_KEY } from '@aztec/ethereum'; -import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; +import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; /** * The Validator Configuration @@ -10,6 +11,12 @@ export interface ValidatorClientConfig { /** Do not run the validator */ disableValidator: boolean; + + /** Interval between polling for new attestations from peers */ + attestationPoolingIntervalMs: number; + + /** Wait for attestations timeout */ + attestationWaitTimeoutMs: number; } export const validatorClientConfigMappings: ConfigMappingsType = { @@ -23,6 +30,16 @@ export const validatorClientConfigMappings: ConfigMappingsType { + // Check that we have all of the proposal's transactions in our p2p client + const txHashes = proposal.txs; + const haveTx = txHashes.map(txHash => this.p2pClient.getTxStatus(txHash)); + + if (haveTx.length !== txHashes.length) { + console.log("WE ARE MISSING TRANSCTIONS"); + } + return this.validationService.attestToProposal(proposal); } @@ -82,18 +99,31 @@ export class ValidatorClient implements Validator { // Wait and poll the p2pClients attestation pool for this block // until we have enough attestations + const startTime = Date.now(); + const slot = proposal.header.globalVariables.slotNumber.toBigInt(); + // TODO: tidy + numberOfRequiredAttestations -= 1; // We self sign + this.log.info(`Waiting for ${numberOfRequiredAttestations} attestations for slot: ${slot}`); let attestations: BlockAttestation[] = [await this.attestToProposal(proposal)]; while (attestations.length < numberOfRequiredAttestations) { attestations = await this.p2pClient.getAttestationsForSlot(slot); + // Rememebr we can subtract 1 from this if we self sign if (attestations.length < numberOfRequiredAttestations) { + this.log.verbose(`SEAN: collected ${attestations.length} attestations so far ${numberOfRequiredAttestations} required`); this.log.verbose(`Waiting ${this.attestationPoolingIntervalMs}ms for more attestations...`); await sleep(this.attestationPoolingIntervalMs); } + + // FIX(md): kinna sad looking code + if (Date.now() - startTime > this.attestationWaitTimeoutMs) { + this.log.error(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); + throw new Error(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); + } } this.log.info(`Collected all attestations for slot, ${slot}`); From fc7a04a18972669fb1a479b1edc3c6f47042a48e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 30 Aug 2024 18:36:54 +0000 Subject: 
[PATCH 006/123] temp --- l1-contracts/src/core/Rollup.sol | 25 +------------------ .../src/p2p/block_attestation.ts | 12 ++++++--- yarn-project/circuit-types/src/p2p/mocks.ts | 7 ++++-- .../end-to-end/src/e2e_p2p_network.test.ts | 4 +-- yarn-project/end-to-end/src/fixtures/utils.ts | 1 - .../p2p/src/attestation_pool/mocks.ts | 9 ++++--- .../src/publisher/l1-publisher.ts | 16 +++++++----- .../src/sequencer/sequencer.test.ts | 3 ++- .../src/sequencer/sequencer.ts | 6 ++++- .../src/duties/validation_service.ts | 3 +-- 10 files changed, 40 insertions(+), 46 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 32a409434da..6c1d7da2273 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -563,29 +563,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { ); } - /** - * @notice Validates the header for submission - * - * @param _header - The proposed block header - * @param _signatures - The signatures for the attestations - * @param _digest - The digest that signatures signed - * @param _currentTime - The time of execution - * @dev - This value is provided to allow for simple simulation of future - * @param _flags - Flags specific to the execution, whether certain checks should be skipped - */ - function _validateHeader( - HeaderLib.Header memory _header, - SignatureLib.Signature[] memory _signatures, - bytes32 _digest, - uint256 _currentTime, - DataStructures.ExecutionFlags memory _flags - ) internal view { - _validateHeaderForSubmissionBase(_header, _currentTime, _flags); - _validateHeaderForSubmissionSequencerSelection( - _header.globalVariables.slotNumber, _signatures, _digest, _currentTime, _flags - ); - } - /** * @notice Validate a header for submission to the pending chain (sequencer selection checks) * @@ -641,7 +618,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__InvalidEpoch(currentEpoch, epochNumber); } - _processPendingBlock(_slot, _signatures, _digest, _flags); + _proposePendingBlock(_slot, _signatures, _digest, _flags); } /** diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index fd38d691172..2b9a13be63c 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -8,6 +8,7 @@ import { recoverMessageAddress } from 'viem'; import { Gossipable } from './gossipable.js'; import { Signature } from './signature.js'; import { TopicType, createTopicString } from './topic_type.js'; +import { TxHash } from '../tx/tx_hash.js'; export class BlockAttestationHash extends Buffer32 { constructor(hash: Buffer) { @@ -31,6 +32,7 @@ export class BlockAttestation extends Gossipable { public readonly header: Header, // TODO(https://github.com/AztecProtocol/aztec-packages/pull/7727#discussion_r1713670830): temporary public readonly archive: Fr, + public readonly txHashes: TxHash[], /** The signature of the block attester */ public readonly signature: Signature, ) { @@ -53,8 +55,10 @@ export class BlockAttestation extends Gossipable { async getSender() { if (!this.sender) { // Recover the sender from the attestation + const payload = serializeToBuffer([this.archive, this.txHashes]); + const payload0x: `0x${string}` = `0x${payload.toString('hex')}`; const address = await recoverMessageAddress({ - message: { raw: this.p2pMessageIdentifier().to0xString() }, + message: { raw: payload0x }, signature: this.signature.to0xString(), }); // Cache the 
sender for later use @@ -65,15 +69,15 @@ export class BlockAttestation extends Gossipable { } toBuffer(): Buffer { - return serializeToBuffer([this.header, this.archive, this.signature]); + return serializeToBuffer([this.header, this.archive, this.txHashes.length, this.txHashes, this.signature]); } static fromBuffer(buf: Buffer | BufferReader): BlockAttestation { const reader = BufferReader.asReader(buf); - return new BlockAttestation(reader.readObject(Header), reader.readObject(Fr), reader.readObject(Signature)); + return new BlockAttestation(reader.readObject(Header), reader.readObject(Fr), reader.readArray(reader.readNumber(), TxHash), reader.readObject(Signature)); } static empty(): BlockAttestation { - return new BlockAttestation(Header.empty(), Fr.ZERO, Signature.empty()); + return new BlockAttestation(Header.empty(), Fr.ZERO, [], Signature.empty()); } } diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index f85ba76a6ae..a177774371e 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -8,6 +8,7 @@ import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; import { Signature } from './signature.js'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise => { signer = signer || randomSigner(); @@ -26,9 +27,11 @@ export const makeBlockAttestation = async (signer?: PrivateKeyAccount): Promise< const blockHeader = makeHeader(1); const archive = Fr.random(); - const signature = Signature.from0xString(await signer.signMessage({ message: { raw: archive.toString() } })); + const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); + const hash: `0x${string}` = `0x${serializeToBuffer([archive, txs]).toString('hex')}`; + const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); - return new BlockAttestation(blockHeader, archive, signature); + return new BlockAttestation(blockHeader, archive, txs, signature); }; export const randomSigner = (): PrivateKeyAccount => { diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index c17b92bce54..69098d1937c 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -36,7 +36,7 @@ describe('e2e_p2p_network', () => { beforeEach(async () => { // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` - const useLocalHost = true; + const useLocalHost = false; if (useLocalHost) { jest.setTimeout(300_000); } @@ -101,7 +101,7 @@ describe('e2e_p2p_network', () => { ); // Warp an entire epoch ahead. So that the committee exists - await jumpIntoNextEpoch(); + // await jumpIntoNextEpoch(); // wait a bit for peers to discover each other await sleep(4000); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 5f4496e9975..051e609f177 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -462,7 +462,6 @@ export async function setup( const wallets = numberOfAccounts > 0 ? 
await createAccounts(pxe, numberOfAccounts) : []; const cheatCodes = CheatCodes.create(config.l1RpcUrl, pxe!); - console.log('made account'); const teardown = async () => { if (aztecNode instanceof AztecNodeService) { diff --git a/yarn-project/p2p/src/attestation_pool/mocks.ts b/yarn-project/p2p/src/attestation_pool/mocks.ts index 22e5da70a94..00194d52b96 100644 --- a/yarn-project/p2p/src/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/attestation_pool/mocks.ts @@ -1,6 +1,7 @@ -import { BlockAttestation, Signature } from '@aztec/circuit-types'; +import { BlockAttestation, Signature, TxHash } from '@aztec/circuit-types'; import { makeHeader } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type PrivateKeyAccount } from 'viem'; import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; @@ -25,9 +26,11 @@ export const mockAttestation = async (signer: PrivateKeyAccount, slot: number = // Use arbitrary numbers for all other than slot const header = makeHeader(1, 2, slot); const archive = Fr.random(); - const message = archive.toString(); + const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); + + const message: `0x${string}` = `0x${serializeToBuffer([archive, txs]).toString('hex')}`; const sigString = await signer.signMessage({ message }); const signature = Signature.from0xString(sigString); - return new BlockAttestation(header, archive, signature); + return new BlockAttestation(header, archive, txs, signature); }; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 6643c0d5a88..371194b4bb1 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,4 +1,4 @@ -import { type L2Block, type Signature } from '@aztec/circuit-types'; +import { TxHash, type L2Block, type Signature } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, GENESIS_ARCHIVE_ROOT, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; @@ -302,7 +302,7 @@ export class L1Publisher { return false; } - const processTxArgs = { + const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), @@ -311,6 +311,8 @@ export class L1Publisher { txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? 
}; + console.log('proposeTxArgs', proposeTxArgs); + // Publish body and propose block (if not already published) if (!this.interrupted) { let txHash; @@ -318,9 +320,9 @@ export class L1Publisher { if (await this.checkIfTxsAreAvailable(block)) { this.log.verbose(`Transaction effects of block ${block.number} already published.`, ctx); - txHash = await this.sendProposeWithoutBodyTx(processTxArgs); + txHash = await this.sendProposeWithoutBodyTx(proposeTxArgs); } else { - txHash = await this.sendProposeTx(processTxArgs); + txHash = await this.sendProposeTx(proposeTxArgs); } if (!txHash) { @@ -534,11 +536,13 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - if (!L1Publisher.SKIP_SIMULATION) { + console.log('args', args); + + // if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.propose(args, { account: this.account, }); - } + // } return await this.rollupContract.write.propose(args, { account: this.account, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 24a52633326..2761a8323af 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -79,7 +79,8 @@ describe('sequencer', () => { return undefined; } - const attestation = new BlockAttestation(block.header, archive, mockedSig); + // TODO(md): might be errors in here + const attestation = new BlockAttestation(block.header, archive, [], mockedSig); (attestation as any).sender = committee[0]; return [attestation]; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 2b1658b77a9..60cee46ffa9 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -482,7 +482,7 @@ export class Sequencer { this.log.verbose("Attestations collected"); try { - await this.publishL2Block(block, attestations); + await this.publishL2Block(block, attestations, txHashes); this.metrics.recordPublishedBlock(workDuration); this.log.info( `Submitted rollup block ${block.number} with ${ @@ -557,6 +557,7 @@ export class Sequencer { const selfSign = await this.validatorClient.attestToProposal(proposal); attestations.push(selfSign); + // note: the smart contract requires that the signatures are provided in the order of the committee return await orderAttestations(attestations, committee); } @@ -572,6 +573,9 @@ export class Sequencer { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; + console.log('attestations', attestations); + console.log('txHashes', txHashes); + const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); if (publishedL2Block) { this.lastPublishedBlock = block.number; diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index da04ce8ac60..383544345ca 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -36,9 +36,8 @@ export class ValidationService { // TODO: in the function before this, check that all of the txns exist in the payload - console.log('attesting to proposal', proposal); const buf = proposal.getPayload(); const sig = await this.keyStore.sign(buf); - return new 
BlockAttestation(proposal.header, proposal.archive, sig); + return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } } From 9eed29802926b2654801a9c804cc98218de773cc Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 2 Sep 2024 11:38:08 +0000 Subject: [PATCH 007/123] temp --- l1-contracts/src/core/Rollup.sol | 3 ++- l1-contracts/src/core/interfaces/IRollup.sol | 4 +++- .../sequencer-client/src/publisher/l1-publisher.ts | 9 +++++---- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 4 ++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 6c1d7da2273..a14459b7e49 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -199,7 +199,8 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ - function propose( + // Temp change name to fix viem issue + function proposeWithBody( bytes calldata _header, bytes32 _archive, bytes32 _blockHash, diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index d344202ce66..717b53952ae 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -38,7 +38,9 @@ interface IRollup { function OUTBOX() external view returns (IOutbox); - function propose( + // Temp: turns out there is a viem bug where it cannot differentiate between the two + // different types + function proposeWithBody( bytes calldata _header, bytes32 _archive, bytes32 _blockHash, diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 371194b4bb1..b553e381d4f 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -311,7 +311,7 @@ export class L1Publisher { txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? 
}; - console.log('proposeTxArgs', proposeTxArgs); + // console.log('proposeTxArgs', proposeTxArgs); // Publish body and propose block (if not already published) if (!this.interrupted) { @@ -536,15 +536,16 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - console.log('args', args); + // console.log('args length', args.length); // if (!L1Publisher.SKIP_SIMULATION) { - await this.rollupContract.simulate.propose(args, { + const simulationResult = await this.rollupContract.simulate.proposeWithBody(args, { account: this.account, }); + console.log(simulationResult); // } - return await this.rollupContract.write.propose(args, { + return await this.rollupContract.write.proposeWithBody(args, { account: this.account, }); } else { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 60cee46ffa9..b398824a5fc 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -573,8 +573,8 @@ export class Sequencer { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - console.log('attestations', attestations); - console.log('txHashes', txHashes); + // console.log('attestations', attestations); + // console.log('txHashes', txHashes); const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); if (publishedL2Block) { From cc09455b58035356e63a343f0cfaabb41986b1b2 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 2 Sep 2024 17:30:40 +0000 Subject: [PATCH 008/123] temp --- yarn-project/end-to-end/src/fixtures/utils.ts | 1 + yarn-project/end-to-end/src/fixtures/watcher.ts | 8 ++++++++ .../sequencer-client/src/publisher/l1-publisher.ts | 12 ++++++++++++ .../sequencer-client/src/sequencer/sequencer.ts | 5 +++++ 4 files changed, 26 insertions(+) diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 051e609f177..d36464718fe 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -453,6 +453,7 @@ export async function setup( } } + // A watcher that performs time jumps in the sequencer when they are required const watcher = new Watcher( new EthCheatCodes(config.l1RpcUrl), deployL1ContractsValues.l1ContractAddresses.rollupAddress, diff --git a/yarn-project/end-to-end/src/fixtures/watcher.ts b/yarn-project/end-to-end/src/fixtures/watcher.ts index 0d254fbf5ff..b65b3010f9a 100644 --- a/yarn-project/end-to-end/src/fixtures/watcher.ts +++ b/yarn-project/end-to-end/src/fixtures/watcher.ts @@ -6,6 +6,13 @@ import { RollupAbi } from '@aztec/l1-artifacts'; import { type GetContractReturnType, type HttpTransport, type PublicClient, getAddress, getContract } from 'viem'; import type * as chains from 'viem/chains'; +/** + * Watcher + * + * The watcher is used within tests in order to adjust the timestamps of an automine chain to have the correct block.timestamp values + * that are expected with the pending chain's timeliness requirements. 
+ * + */ export class Watcher { private rollup: GetContractReturnType>; @@ -55,6 +62,7 @@ export class Watcher { this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`); } + console.log("The watcher has jumped slot") this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`); } } catch (err) { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index b553e381d4f..15034abae41 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -12,6 +12,7 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import pick from 'lodash.pick'; import { + BaseError, ContractFunctionRevertedError, type GetContractReturnType, type Hex, @@ -567,6 +568,17 @@ export class L1Publisher { }); } } catch (err) { + // TODO: tidy up this error + if (err instanceof BaseError) { + const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); + if (revertError instanceof ContractFunctionRevertedError) { + // TODO: turn this into a function + const errorName = revertError.data?.errorName ?? ""; + const args = revertError.metaMessages && revertError.metaMessages?.length > 1 ? revertError.metaMessages[1].trimStart() : ''; + this.log.error(`propose failed with "${errorName}${args}"`) + return undefined; + } + } this.log.error(`Rollup publish failed`, err); return undefined; } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index b398824a5fc..9d971630b0f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -216,7 +216,12 @@ export class Sequencer { let slot: bigint; try { slot = await this.mayProposeBlock(chainTipArchive, BigInt(newBlockNumber)); + // TODO(md) below in this block are for debugging remove + const seqEnr = this.p2pClient.getEnr(); + console.log("SEAN | ", seqEnr, " IS able to propose a block"); } catch (err) { + const seqEnr = this.p2pClient.getEnr(); + console.log("SEAN | ", seqEnr, " NOT able to propose a block"); this.log.debug(`Cannot propose for block ${newBlockNumber}`); return; } From 4727cd93c62d0916197ea6054fe35f13a7760402 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:28:51 +0000 Subject: [PATCH 009/123] temp: get passing with txhash payloads --- .../archiver/src/archiver/eth_log_handlers.ts | 8 +++++--- .../circuit-types/src/p2p/block_attestation.ts | 9 ++++++--- .../circuit-types/src/p2p/block_proposal.ts | 7 +++++-- .../global_variable_builder/global_builder.ts | 2 ++ .../src/publisher/l1-publisher.ts | 18 +++++++++++++----- .../src/sequencer/sequencer.ts | 15 +++++++++------ .../src/duties/validation_service.ts | 7 +++++-- 7 files changed, 45 insertions(+), 21 deletions(-) diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index 04d8a8b5339..5c8eefd64e5 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -110,7 +110,7 @@ async function getBlockMetadataFromRollupTx( data, }); - if (!(functionName === 'propose')) { + if (!(functionName === 'propose' || functionName === 'proposeWithBody')) { throw new Error(`Unexpected method called ${functionName}`); } const [headerHex, archiveRootHex, _] = args! 
as readonly [Hex, Hex, Hex];
@@ -152,10 +152,11 @@ async function getBlockBodiesFromAvailabilityOracleTx(
   // [
   //  "propose(bytes,bytes32,bytes32,(bool,uint8,bytes32,bytes32)[],bytes)": "08978fe9",
   //  "propose(bytes,bytes32,bytes32,bytes)": "81e6f472",
+  //  "proposeWithBody(bytes,bytes32,bytes32,bytes32[],(bool,uint8,bytes32,bytes32)[],bytes)": "b2283b07",
   //  "publish(bytes calldata _body)"
   // ]
-  const DATA_INDEX = [4, 3, 0];
-  const SUPPORTED_SIGS = ['0x08978fe9', '0x81e6f472', '0x7fd28346'];
+  const DATA_INDEX = [4, 3, 5, 0]; // index where the body is, in the parameters list
+  const SUPPORTED_SIGS = ['0x08978fe9', '0x81e6f472', '0xb2283b07', '0x7fd28346'];
 
   const signature = slice(data, 0, 4);
 
@@ -163,6 +164,7 @@
     throw new Error(`Unexpected method called ${signature}`);
   }
 
+  // Check if explicitly calling the DA oracle
   if (signature === SUPPORTED_SIGS[SUPPORTED_SIGS.length - 1]) {
     const { args } = decodeFunctionData({
       abi: AvailabilityOracleAbi,
diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts
index 2b9a13be63c..b98771f4a40 100644
--- a/yarn-project/circuit-types/src/p2p/block_attestation.ts
+++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts
@@ -3,7 +3,7 @@ import { Buffer32 } from '@aztec/foundation/buffer';
 import { Fr } from '@aztec/foundation/fields';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
 
-import { recoverMessageAddress } from 'viem';
+import { keccak256, recoverMessageAddress } from 'viem';
 
 import { Gossipable } from './gossipable.js';
 import { Signature } from './signature.js';
@@ -56,9 +56,12 @@ export class BlockAttestation extends Gossipable {
     if (!this.sender) {
       // Recover the sender from the attestation
       const payload = serializeToBuffer([this.archive, this.txHashes]);
-      const payload0x: `0x${string}` = `0x${payload.toString('hex')}`;
+      console.log("ATTESTATION PAYLOAD", payload);
+      // TODO(md) - temporarily hashing again to have less changes in the rollup
+      const hashed = keccak256(payload);
+      // const payload0x: `0x${string}` = `0x${payload.toString('hex')}`;
       const address = await recoverMessageAddress({
-        message: { raw: payload0x },
+        message: { raw: hashed },
         signature: this.signature.to0xString(),
       });
       // Cache the sender for later use
diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts
index 0d8c7514b08..3195a8c3f4c 100644
--- a/yarn-project/circuit-types/src/p2p/block_proposal.ts
+++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts
@@ -3,7 +3,7 @@ import { Buffer32 } from '@aztec/foundation/buffer';
 import { Fr } from '@aztec/foundation/fields';
 import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
 
-import { recoverMessageAddress } from 'viem';
+import { keccak256, recoverMessageAddress } from 'viem';
 
 import { TxHash } from '../tx/tx_hash.js';
 import { Gossipable } from './gossipable.js';
@@ -57,8 +57,11 @@ export class BlockProposal extends Gossipable {
   async getSender() {
     if (!this.sender) {
       // performance note(): this signature method requires another hash behind the scenes
+      const hashed = keccak256(this.getPayload());
       const address = await recoverMessageAddress({
-        message: { raw: this.p2pMessageIdentifier().to0xString() },
+        // TODO(md): fix this up
+        // message: { raw: this.p2pMessageIdentifier().to0xString() },
+        message: { raw: hashed },
         signature: this.signature.to0xString(),
       });
       // 
Cache the sender for later use diff --git a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts index 91da4e1b46c..4904068fb11 100644 --- a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts +++ b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts @@ -69,7 +69,9 @@ export class GlobalVariableBuilder { slotNumber = await this.rollupContract.read.getSlotAt([ts]); } + // TODO: why does this work if I - 1 it const timestamp = await this.rollupContract.read.getTimestampForSlot([slotNumber]); + console.log("timestamp from the chain ", timestamp, " for the slot number ", slotNumber); const slotFr = new Fr(slotNumber); const timestampFr = new Fr(timestamp); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index c3f230d7a9d..81aa107fa96 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -32,6 +32,7 @@ import type * as chains from 'viem/chains'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1PublisherMetrics } from './l1-publisher-metrics.js'; +import { keccak256 } from '@aztec/foundation/crypto'; /** * Stats for a sent transaction. @@ -298,6 +299,7 @@ export class L1Publisher { * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ + // TODO: rename propose public async processL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { const ctx = { blockNumber: block.number, @@ -305,7 +307,9 @@ export class L1Publisher { blockHash: block.hash().toString(), }; - const processTxArgs = { + // TODO: move this to take in the proposal to get the digest - rather than manually + const tempDigest = keccak256(serializeToBuffer(block.archive.root, txHashes ?? [])); + const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), @@ -314,7 +318,7 @@ export class L1Publisher { txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? }; - // console.log('proposeTxArgs', proposeTxArgs); + console.log('proposeTxArgs', proposeTxArgs); // Publish body and propose block (if not already published) if (!this.interrupted) { @@ -328,7 +332,8 @@ export class L1Publisher { // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. await this.validateBlockForSubmission(block.header, { - digest: block.archive.root.toBuffer(), + // digest: block.archive.root.toBuffer(), // THIS IS NOT THE DIGEST ANYMORE!!!!! + digest: tempDigest, signatures: attestations ?? 
[], }); @@ -552,12 +557,13 @@ export class L1Publisher { // console.log('args length', args.length); - // if (!L1Publisher.SKIP_SIMULATION) { + // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead + if (!L1Publisher.SKIP_SIMULATION) { const simulationResult = await this.rollupContract.simulate.proposeWithBody(args, { account: this.account, }); console.log(simulationResult); - // } + } return await this.rollupContract.write.proposeWithBody(args, { account: this.account, @@ -570,12 +576,14 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; + // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.propose(args, { account: this.account, }); } + // TODO(md): first tx is failing in here return await this.rollupContract.write.propose(args, { account: this.account, }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 39e3bbc0ac0..906583826aa 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -244,6 +244,7 @@ export class Sequencer { this._feeRecipient, slot, ); + console.log("new global variables ", newGlobalVariables); // If I created a "partial" header here that should make our job much easier. const proposalHeader = new Header( @@ -512,14 +513,15 @@ export class Sequencer { this.isFlushing = true; } - protected async collectAttestations(block: L2Block): Promise { + protected async collectAttestations(block: L2Block, txHashes: TxHash[]): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962): inefficient to have a round trip in here - this should be cached const committee = await this.publisher.getCurrentEpochCommittee(); this.log.verbose(`ATTEST | committee length ${committee.length}`); if (committee.length === 0) { this.log.verbose(`ATTEST | committee length is 0, skipping`); - throw new Error('Committee length is 0, WHAT THE FUCK'); + // TODO(md): remove error + throw new Error('Committee length is 0, WHAT'); return undefined; } @@ -546,11 +548,12 @@ export class Sequencer { this.state = SequencerState.WAITING_FOR_ATTESTATIONS; const attestations = await this.validatorClient.collectAttestations(proposal, numberOfRequiredAttestations); - this.log.verbose("Collected attestations from validators"); + this.log.verbose(`Collected attestations from validators, number of attestations: ${attestations.length}`); - // TODO: clean: SELF REPORT LMAO - const selfSign = await this.validatorClient.attestToProposal(proposal); - attestations.push(selfSign); + // NOTE: is the self report now done in the method above + // // TODO: clean: SELF REPORT LMAO + // const selfSign = await this.validatorClient.attestToProposal(proposal); + // attestations.push(selfSign); // note: the smart contract requires that the signatures are provided in the order of the committee diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 383544345ca..3d8577243e9 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -4,6 +4,7 @@ import { type Fr } from '@aztec/foundation/fields'; import { type ValidatorKeyStore } from 
'../key_store/interface.js'; import { serializeToBuffer } from '@aztec/foundation/serialize'; +import { keccak256 } from '@aztec/foundation/crypto'; export class ValidationService { constructor(private keyStore: ValidatorKeyStore) {} @@ -20,7 +21,9 @@ export class ValidationService { async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { // Note: just signing the archive for now const payload = serializeToBuffer([archive, txs]); - const sig = await this.keyStore.sign(payload); + // TODO(temp hash it together before signing) + const hashed = keccak256(payload); + const sig = await this.keyStore.sign(hashed); return new BlockProposal(header, archive, txs, sig); } @@ -36,7 +39,7 @@ export class ValidationService { // TODO: in the function before this, check that all of the txns exist in the payload - const buf = proposal.getPayload(); + const buf = keccak256(proposal.getPayload()); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } From b4c2a46ed38dae792602befab19bffcc142c95e9 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 5 Sep 2024 17:39:35 +0000 Subject: [PATCH 010/123] fix: make sure transactions are available in the tx pool --- .../src/p2p/block_attestation.ts | 3 +- .../end-to-end/src/e2e_p2p_network.test.ts | 92 +++++++++++++++---- yarn-project/p2p/src/client/p2p_client.ts | 27 +++++- .../p2p/src/service/reqresp/reqresp.ts | 5 + .../src/sequencer/sequencer.ts | 2 + .../validator-client/src/validator.ts | 50 ++++++++-- 6 files changed, 148 insertions(+), 31 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index b98771f4a40..087da96fd15 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -56,8 +56,7 @@ export class BlockAttestation extends Gossipable { if (!this.sender) { // Recover the sender from the attestation const payload = serializeToBuffer([this.archive, this.txHashes]); - console.log("ATTESTATION PAYLOAD", payload); - // TODO(md) - temporarily hashing again to hav eless changes in the rollup + // TODO(md) - temporarily hashing again to have less changes in the rollup const hashed = keccak256(payload); // const payload0x: `0x${string}` = `0x${payload.toString('hex')}`; const address = await recoverMessageAddress({ diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 681846dba4b..306baccc8e1 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -84,6 +84,14 @@ describe('e2e_p2p_network', () => { await cheatCodes.warp(Number(timestamp)); }); + + const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { + for (const node of nodes) { + await node.stop(); + } + await bootstrap.stop(); + } + afterEach(() => teardown()); afterAll(() => { @@ -92,13 +100,7 @@ describe('e2e_p2p_network', () => { } }); - const jumpIntoNextEpoch = async () => { - const currentTimestamp = await cheatCodes.eth.timestamp(); - const timeJump = currentTimestamp + (AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION); - await cheatCodes.eth.warp(timeJump + 1); - }; - - it.only('should rollup txs from all peers', async () => { + it('should rollup txs from all peers', async () => { // create the bootstrap node for the network if 
(!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); @@ -116,8 +118,6 @@ describe('e2e_p2p_network', () => { BOOT_NODE_UDP_PORT, ); - // Warp an entire epoch ahead. So that the committee exists - // await jumpIntoNextEpoch(); // wait a bit for peers to discover each other await sleep(4000); @@ -137,13 +137,71 @@ describe('e2e_p2p_network', () => { ); // shutdown all nodes. - for (const context of contexts) { - await context.node.stop(); - await context.pxeService.stop(); - } - await bootstrapNode.stop(); + await stopNodes(bootstrapNode, nodes); }); + it("should produce an attestation by requesting tx data over the p2p network", async () => { + + // Birds eye overview of the test + // We spin up x nodes + // We turn off receiving a tx via gossip from one of the nodes + // We send a transaction and gossip it to other nodes + // This node will receive an attestation that it does not have the data for + // It will request this data over the p2p layer + // We receive all of the attestations that we need and we produce the block + + if (!bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + // create our network of nodes and submit txs into each of them + // the number of txs per node and the number of txs per rollup + // should be set so that the only way for rollups to be built + // is if the txs are successfully gossiped around the nodes. + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = await createNodes( + config, + PEER_ID_PRIVATE_KEYS, + bootstrapNodeEnr, + NUM_NODES , + BOOT_NODE_UDP_PORT, + ); + + // wait a bit for peers to discover each other + await sleep(4000); + + console.log("\n\n\n\n\nALL NODES ARE CREATED\n\n\n\n\n"); + + // Replace the p2p node implementation of one of the nodes with a spy such that it does not store transactions that are gossiped to it + const nodeToTurnOff = 0; + jest.spyOn((nodes[nodeToTurnOff] as any).p2pClient.p2pService, 'processTxFromPeer') + .mockImplementation((args: any): Promise => { + console.log("mocked implementation of received transasction from peer", args.getTxHash()); + return Promise.resolve(); + }); + + // In this shuffle, the node that we turned off receipt through gossiping with will be the third to create a block + // And it will not produce a rollup as nothing exists within it's tx pool. + // So we only send transactions to the first two nodes + for (let i = 0; i < 2; i++) { + // The node which we disabled receiving from gossip from will not have any transactions in it's mempool + const context = await createPXEServiceAndSubmitTransactions(nodes[i], NUM_TXS_PER_NODE); + contexts.push(context); + } + + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + await tx.wait(); + logger.info(`Tx ${i}-${j}: ${await tx.getTxHash()} has been mined`); + return await tx.getTxHash(); + }), + ), + ); + + await stopNodes(bootstrapNode, nodes); + }) + it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; const nodes: AztecNodeService[] = await createNodes( @@ -201,11 +259,7 @@ describe('e2e_p2p_network', () => { ); // shutdown all nodes. 
- // for (const context of contexts) { - for (const context of contexts) { - await context.node.stop(); - await context.pxeService.stop(); - } + await stopNodes(bootstrapNode, newNodes); }); // creates an instance of the PXE and submit a given number of transactions to it. diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 819c8e7eff8..9b2be9e194c 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -72,6 +72,13 @@ export interface P2P { // ^ This pattern is not my favorite (md) registerBlockProposalHandler(handler: (block: BlockProposal) => Promise): void; + /** + * Request a list of transactions from another peer by their tx hashes. + * @param txHashes - Hashes of the txs to query. + * @returns A list of transactions or undefined if the transactions are not found. + */ + requestTxs(txHashes: TxHash[]): Promise<(Tx | undefined)[]>; + /** * Request a transaction from another peer by its tx hash. * @param txHash - Hash of the tx to query. @@ -284,9 +291,25 @@ export class P2PClient implements P2P { this.p2pService.registerBlockReceivedCallback(handler); } - public requestTxByHash(txHash: TxHash): Promise { + // TODO: this will need a timeout + retry mechanism to make sure that we can retreive things on time + public requestTxs(txHashes: TxHash[]): Promise<(Tx | undefined)[]> { + // TODO: adjust this - what if one peer can service all of the requests? then we do not need to send all of these at once and overload the peer + // have a thinky + const requestPromises = txHashes.map(txHash => this.requestTxByHash(txHash)); + + return Promise.all(requestPromises); + } + + public async requestTxByHash(txHash: TxHash): Promise { // Underlying I want to use the libp2p service to just have a request method where the subprotocol is defined here - return this.p2pService.sendRequest(TX_REQ_PROTOCOL, txHash); + console.log("REQUESTING TX BY HASH via req resp!", txHash); + const tx = await this.p2pService.sendRequest(TX_REQ_PROTOCOL, txHash); + console.log("GOT TX FROM REQ RESP", tx); + if (tx) { + // tODO: do i need this await + await this.txPool.addTxs([tx]); + } + return tx; } /** diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 2851c9e9fce..dada63424c2 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -96,8 +96,13 @@ export class ReqResp { ): Promise { try { const stream = await this.libp2p.dialProtocol(peerId, subProtocol); + this.logger.debug(`Stream opened with ${peerId.publicKey} for ${subProtocol}`); const result = await pipe([payload], stream, this.readMessage); + + await stream.close(); + this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); + return result; } catch (e) { this.logger.warn(`Failed to send request to peer ${peerId.publicKey}`); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 906583826aa..e6efaff427a 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -538,6 +538,7 @@ export class Sequencer { this.log.verbose("ATTEST | Creating block proposal"); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); + console.log("PROPOSAL send form sequencer ", await proposal.getSender()); this.state = 
SequencerState.PUBLISHING_BLOCK_TO_PEERS; this.log.verbose("Broadcasting block proposal to validators"); @@ -576,6 +577,7 @@ export class Sequencer { const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); if (publishedL2Block) { + console.log("\n\n\n\nPUBLISHED L2 BLOCK", block.number, " with tx hashes ", txHashes); this.lastPublishedBlock = block.number; } else { throw new Error(`Failed to publish block ${block.number}`); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 5f926cf59ff..9aac5c2c362 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -64,23 +64,57 @@ export class ValidatorClient implements Validator { public registerBlockProposalHandler() { const handler = (block: BlockProposal): Promise => { - return this.validationService.attestToProposal(block); + return this.attestToProposal(block); }; this.p2pClient.registerBlockProposalHandler(handler); } async attestToProposal(proposal: BlockProposal): Promise { - // Check that we have all of the proposal's transactions in our p2p client - const txHashes = proposal.txs; - const haveTx = txHashes.map(txHash => this.p2pClient.getTxStatus(txHash)); - - if (haveTx.length !== txHashes.length) { - console.log("WE ARE MISSING TRANSCTIONS"); - } + // Check that all of the tranasctions in the proposal are available in the tx pool before attesting + await this.ensureTransactionsAreAvailable(proposal); + this.log.debug(`Transactions available, attesting to proposal with ${proposal.txs.length} transactions`); + // If the above function does not throw an error, then we can attest to the proposal return this.validationService.attestToProposal(proposal); } + /** + * Ensure that all of the transactions in the proposal are available in the tx pool before attesting + * + * 1. Check if the local tx pool contains all of the transactions in the proposal + * 2. If any transactions are not in the local tx pool, request them from the network + * 3. 
If we cannot retrieve them from the network, throw an error + * @param proposal - The proposal to attest to + */ + async ensureTransactionsAreAvailable(proposal: BlockProposal) { + const txHashes: TxHash[] = proposal.txs; + + const transactionStatuses = txHashes.map(txHash => this.p2pClient.getTxStatus(txHash)); + const haveAllTxs = transactionStatuses.every(tx => tx === 'pending' || tx === 'mined'); + + // Only in an if statement here for logging purposes + if (!haveAllTxs) { + const missingTxs: TxHash[] = txHashes.map((_, index) => { + if (!transactionStatuses[index]) { + return txHashes[index]; + } + return undefined; + }).filter(tx => tx !== undefined) as TxHash[]; + + this.log.verbose(`Missing ${missingTxs.length} attestations transactions in the tx pool, requesting from the network`); + + if (missingTxs) { + // If transactions are requested successfully, they will be written into the tx pool + const requestedTxs = await this.p2pClient.requestTxs(missingTxs); + const successfullyRetrievedMissingTxs = requestedTxs.every(tx => tx !== undefined); + if (!successfullyRetrievedMissingTxs) { + throw new Error("Failed to retrieve missing transactions"); + } + } + } + } + + createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { return this.validationService.createBlockProposal(header, archive, txs); } From 4a8d178d97f8af08ae157104450317bb91c2aaff Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 5 Sep 2024 17:40:20 +0000 Subject: [PATCH 011/123] chore: remove logs --- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index e6efaff427a..67527602938 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -244,7 +244,6 @@ export class Sequencer { this._feeRecipient, slot, ); - console.log("new global variables ", newGlobalVariables); // If I created a "partial" header here that should make our job much easier. 
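
The `ensureTransactionsAreAvailable` flow above leans on `requestTxs`, which still carries a TODO about adding a timeout and retry so an unresponsive peer cannot stall attestation. A minimal sketch of bounding the wait, assuming only the `requestTxs` shape shown in these patches; the wrapper name and the resolve-to-undefined behaviour are illustrative, not the actual client code:

```typescript
import { type Tx, type TxHash } from '@aztec/circuit-types';

// Sketch only: bound a batch tx request with a timeout so an unresponsive peer
// cannot stall attestation. Hashes that do not resolve in time come back undefined.
async function requestTxsWithTimeout(
  requestTxs: (txHashes: TxHash[]) => Promise<(Tx | undefined)[]>,
  txHashes: TxHash[],
  timeoutMs: number,
): Promise<(Tx | undefined)[]> {
  const timeout = new Promise<(Tx | undefined)[]>(resolve =>
    setTimeout(() => resolve(txHashes.map(() => undefined)), timeoutMs),
  );
  return Promise.race([requestTxs(txHashes), timeout]);
}
```

A production version would also clear the timer and likely retry against a different peer before giving up.
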
const proposalHeader = new Header( @@ -538,7 +537,6 @@ export class Sequencer { this.log.verbose("ATTEST | Creating block proposal"); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); - console.log("PROPOSAL send form sequencer ", await proposal.getSender()); this.state = SequencerState.PUBLISHING_BLOCK_TO_PEERS; this.log.verbose("Broadcasting block proposal to validators"); @@ -572,12 +570,8 @@ export class Sequencer { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - // console.log('attestations', attestations); - // console.log('txHashes', txHashes); - const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); if (publishedL2Block) { - console.log("\n\n\n\nPUBLISHED L2 BLOCK", block.number, " with tx hashes ", txHashes); this.lastPublishedBlock = block.number; } else { throw new Error(`Failed to publish block ${block.number}`); From b4324fcf5513fba838c4f4322f0a082c2ab59796 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 5 Sep 2024 18:15:19 +0000 Subject: [PATCH 012/123] fmt --- .../src/p2p/block_attestation.ts | 11 ++++-- .../circuit-types/src/p2p/block_proposal.ts | 4 +- yarn-project/circuit-types/src/p2p/mocks.ts | 2 +- .../end-to-end/src/e2e_p2p_network.test.ts | 37 +++++++++--------- yarn-project/end-to-end/src/fixtures/utils.ts | 1 - .../end-to-end/src/fixtures/watcher.ts | 1 - yarn-project/foundation/src/config/env_var.ts | 2 +- yarn-project/p2p/src/client/p2p_client.ts | 10 ++--- .../global_variable_builder/global_builder.ts | 2 - .../src/publisher/l1-publisher.ts | 39 ++++--------------- .../src/sequencer/sequencer.test.ts | 1 - .../src/sequencer/sequencer.ts | 11 +++--- yarn-project/validator-client/src/config.ts | 7 +++- .../src/duties/validation_service.ts | 4 +- .../validator-client/src/validator.ts | 33 +++++++++------- 15 files changed, 74 insertions(+), 91 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 087da96fd15..47ff33f90a7 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -5,10 +5,10 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { keccak256, recoverMessageAddress } from 'viem'; +import { TxHash } from '../tx/tx_hash.js'; import { Gossipable } from './gossipable.js'; import { Signature } from './signature.js'; import { TopicType, createTopicString } from './topic_type.js'; -import { TxHash } from '../tx/tx_hash.js'; export class BlockAttestationHash extends Buffer32 { constructor(hash: Buffer) { @@ -71,12 +71,17 @@ export class BlockAttestation extends Gossipable { } toBuffer(): Buffer { - return serializeToBuffer([this.header, this.archive, this.txHashes.length, this.txHashes, this.signature]); + return serializeToBuffer([this.header, this.archive, this.txHashes.length, this.txHashes, this.signature]); } static fromBuffer(buf: Buffer | BufferReader): BlockAttestation { const reader = BufferReader.asReader(buf); - return new BlockAttestation(reader.readObject(Header), reader.readObject(Fr), reader.readArray(reader.readNumber(), TxHash), reader.readObject(Signature)); + return new BlockAttestation( + reader.readObject(Header), + reader.readObject(Fr), + reader.readArray(reader.readNumber(), TxHash), + reader.readObject(Signature), + ); } static 
empty(): BlockAttestation { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 3195a8c3f4c..1d74c70212c 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -36,17 +36,15 @@ export class BlockProposal extends Gossipable { /** The sequence of transactions in the block */ public readonly txs: TxHash[], /** The signer of the BlockProposal over the header of the new block*/ - public readonly signature: Signature + public readonly signature: Signature, ) { super(); } - static { this.p2pTopic = createTopicString(TopicType.block_proposal); } - override p2pMessageIdentifier(): Buffer32 { return BlockProposalHash.fromField(this.archive); } diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index a177774371e..f4646e20919 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -1,5 +1,6 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type PrivateKeyAccount } from 'viem'; import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; @@ -8,7 +9,6 @@ import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; import { Signature } from './signature.js'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise => { signer = signer || randomSigner(); diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 306baccc8e1..6958f102db2 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -1,12 +1,18 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { CheatCodes, CompleteAddress, type DebugLogger, Fr, GrumpkinScalar, type SentTx, TxStatus, sleep, +import { + CompleteAddress, + type DebugLogger, type DeployL1Contracts, EthCheatCodes, + Fr, + GrumpkinScalar, + type SentTx, + TxStatus, + sleep, } from '@aztec/aztec.js'; -import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; -import { -} from '@aztec/aztec.js'; +import '@aztec/aztec.js'; +import { EthAddress } from '@aztec/circuits.js'; import { RollupAbi } from '@aztec/l1-artifacts'; import { type BootstrapNode } from '@aztec/p2p'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; @@ -39,7 +45,6 @@ describe('e2e_p2p_network', () => { let teardown: () => Promise; let bootstrapNode: BootstrapNode; let bootstrapNodeEnr: string; - let cheatCodes: CheatCodes; let deployL1ContractsValues: DeployL1Contracts; beforeEach(async () => { @@ -84,13 +89,12 @@ describe('e2e_p2p_network', () => { await cheatCodes.warp(Number(timestamp)); }); - const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { for (const node of nodes) { await node.stop(); } await bootstrap.stop(); - } + }; afterEach(() => teardown()); @@ -140,8 +144,7 @@ describe('e2e_p2p_network', () => { await stopNodes(bootstrapNode, nodes); }); - it("should produce an attestation by requesting tx 
data over the p2p network", async () => { - + it('should produce an attestation by requesting tx data over the p2p network', async () => { // Birds eye overview of the test // We spin up x nodes // We turn off receiving a tx via gossip from one of the nodes @@ -162,22 +165,20 @@ describe('e2e_p2p_network', () => { config, PEER_ID_PRIVATE_KEYS, bootstrapNodeEnr, - NUM_NODES , + NUM_NODES, BOOT_NODE_UDP_PORT, ); // wait a bit for peers to discover each other await sleep(4000); - console.log("\n\n\n\n\nALL NODES ARE CREATED\n\n\n\n\n"); - // Replace the p2p node implementation of one of the nodes with a spy such that it does not store transactions that are gossiped to it - const nodeToTurnOff = 0; - jest.spyOn((nodes[nodeToTurnOff] as any).p2pClient.p2pService, 'processTxFromPeer') - .mockImplementation((args: any): Promise => { - console.log("mocked implementation of received transasction from peer", args.getTxHash()); + const nodeToTurnOffTxGossip = 0; + jest + .spyOn((nodes[nodeToTurnOffTxGossip] as any).p2pClient.p2pService, 'processTxFromPeer') + .mockImplementation((): Promise => { return Promise.resolve(); - }); + }); // In this shuffle, the node that we turned off receipt through gossiping with will be the third to create a block // And it will not produce a rollup as nothing exists within it's tx pool. @@ -200,7 +201,7 @@ describe('e2e_p2p_network', () => { ); await stopNodes(bootstrapNode, nodes); - }) + }); it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index f64f29469b1..e8c37b0bd67 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -149,7 +149,6 @@ export const setupL1Contracts = async ( }, }; - console.log("Initial validators", args.initialValidators); const l1Data = await deployL1Contracts(l1RpcUrl, account, chain, logger, l1Artifacts, { l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), diff --git a/yarn-project/end-to-end/src/fixtures/watcher.ts b/yarn-project/end-to-end/src/fixtures/watcher.ts index b65b3010f9a..0e332b9b941 100644 --- a/yarn-project/end-to-end/src/fixtures/watcher.ts +++ b/yarn-project/end-to-end/src/fixtures/watcher.ts @@ -62,7 +62,6 @@ export class Watcher { this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`); } - console.log("The watcher has jumped slot") this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`); } } catch (err) { diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index c11a3fd0176..1dbdba68c45 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -38,7 +38,7 @@ export type EnvVar = | 'P2P_QUERY_FOR_IP' | 'P2P_TX_POOL_KEEP_PROVEN_FOR' | 'TELEMETRY' - | 'OTEL_SERVICE_NAME' + | 'OTEL_SERVICE_NAME' | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' | 'NETWORK_NAME' diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 9b2be9e194c..a16f87b394d 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -296,19 +296,17 @@ export class P2PClient implements P2P { // TODO: adjust this - what if one peer can service all of the requests? 
then we do not need to send all of these at once and overload the peer // have a thinky const requestPromises = txHashes.map(txHash => this.requestTxByHash(txHash)); - return Promise.all(requestPromises); } public async requestTxByHash(txHash: TxHash): Promise { - // Underlying I want to use the libp2p service to just have a request method where the subprotocol is defined here - console.log("REQUESTING TX BY HASH via req resp!", txHash); - const tx = await this.p2pService.sendRequest(TX_REQ_PROTOCOL, txHash); - console.log("GOT TX FROM REQ RESP", tx); + const tx = await this.p2pService.sendRequest(TX_REQ_PROTOCOL, txHash); + + this.log.debug(`Requested ${txHash.toString()} from peer | success = ${!!tx}`); if (tx) { - // tODO: do i need this await await this.txPool.addTxs([tx]); } + return tx; } diff --git a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts index 4904068fb11..91da4e1b46c 100644 --- a/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts +++ b/yarn-project/sequencer-client/src/global_variable_builder/global_builder.ts @@ -69,9 +69,7 @@ export class GlobalVariableBuilder { slotNumber = await this.rollupContract.read.getSlotAt([ts]); } - // TODO: why does this work if I - 1 it const timestamp = await this.rollupContract.read.getTimestampForSlot([slotNumber]); - console.log("timestamp from the chain ", timestamp, " for the slot number ", slotNumber); const slotFr = new Fr(slotNumber); const timestampFr = new Fr(timestamp); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 81aa107fa96..4571e240737 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,7 +1,8 @@ -import { TxHash, type L2Block, type Signature } from '@aztec/circuit-types'; +import { type L2Block, type Signature, type TxHash } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; +import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { serializeToBuffer } from '@aztec/foundation/serialize'; @@ -32,7 +33,6 @@ import type * as chains from 'viem/chains'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1PublisherMetrics } from './l1-publisher-metrics.js'; -import { keccak256 } from '@aztec/foundation/crypto'; /** * Stats for a sent transaction. @@ -92,7 +92,6 @@ export type L1SubmitProofArgs = { aggregationObject: Buffer; }; - export type MetadataForSlot = { proposer: EthAddress; slot: bigint; @@ -251,28 +250,8 @@ export class L1Publisher { }; } - - // /** - // * @notice Calls `getCommitteeAt` with the time of the next Ethereum block - // * @return committee - The committee at the next Ethereum block - // */ - // public async getCommitteeAtNextEthBlock(): Promise { - // // TODO(md): reuse as a helper? 
- // const lastBlockTimestamp = (await this.publicClient.getBlock()).timestamp; - // console.log('lastBlockTimestamp', lastBlockTimestamp); - // const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); - // console.log('ts', ts); - // const committee = await this.rollupContract.read.getCommitteeAt([ - // ts, - // ]); - // console.log('returned committee', committee); - // return committee.map(EthAddress.fromString); - // } - - public async getCurrentEpochCommittee(): Promise { const committee = await this.rollupContract.read.getCurrentEpochCommittee(); - console.log('returned committee', committee); return committee.map(EthAddress.fromString); } @@ -318,8 +297,6 @@ export class L1Publisher { txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? }; - console.log('proposeTxArgs', proposeTxArgs); - // Publish body and propose block (if not already published) if (!this.interrupted) { let txHash; @@ -555,14 +532,11 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - // console.log('args length', args.length); - // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead if (!L1Publisher.SKIP_SIMULATION) { const simulationResult = await this.rollupContract.simulate.proposeWithBody(args, { account: this.account, }); - console.log(simulationResult); } return await this.rollupContract.write.proposeWithBody(args, { @@ -594,9 +568,12 @@ export class L1Publisher { const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); if (revertError instanceof ContractFunctionRevertedError) { // TODO: turn this into a function - const errorName = revertError.data?.errorName ?? ""; - const args = revertError.metaMessages && revertError.metaMessages?.length > 1 ? revertError.metaMessages[1].trimStart() : ''; - this.log.error(`propose failed with "${errorName}${args}"`) + const errorName = revertError.data?.errorName ?? ''; + const args = + revertError.metaMessages && revertError.metaMessages?.length > 1 + ? 
revertError.metaMessages[1].trimStart() + : ''; + this.log.error(`propose failed with "${errorName}${args}"`); return undefined; } } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index b15c53b7cf7..e4c65545d12 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -74,7 +74,6 @@ describe('sequencer', () => { const committee = [EthAddress.random()]; const getSignatures = () => [mockedSig]; const getAttestations = () => { - // TODO(md): might be errors in here const attestation = new BlockAttestation(block.header, archive, [], mockedSig); (attestation as any).sender = committee[0]; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 67527602938..435ea0d2b3c 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -6,7 +6,7 @@ import { type ProcessedTx, Signature, Tx, - TxHash, + type TxHash, type TxValidator, } from '@aztec/circuit-types'; import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; @@ -489,9 +489,9 @@ export class Sequencer { const txHashes = validTxs.map(tx => tx.getTxHash()); this.isFlushing = false; - this.log.verbose("Collecting attestations"); + this.log.verbose('Collecting attestations'); const attestations = await this.collectAttestations(block, txHashes); - this.log.verbose("Attestations collected"); + this.log.verbose('Attestations collected'); try { await this.publishL2Block(block, attestations, txHashes); @@ -535,11 +535,11 @@ export class Sequencer { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7974): we do not have transaction[] lists in the block for now // Dont do anything with the proposals for now - just collect them - this.log.verbose("ATTEST | Creating block proposal"); + this.log.verbose('ATTEST | Creating block proposal'); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); this.state = SequencerState.PUBLISHING_BLOCK_TO_PEERS; - this.log.verbose("Broadcasting block proposal to validators"); + this.log.verbose('Broadcasting block proposal to validators'); this.validatorClient.broadcastBlockProposal(proposal); // Note do we know if it is wating for attestations as it thinks it should be @@ -554,7 +554,6 @@ export class Sequencer { // const selfSign = await this.validatorClient.attestToProposal(proposal); // attestations.push(selfSign); - // note: the smart contract requires that the signatures are provided in the order of the committee return await orderAttestations(attestations, committee); } diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 65333c33961..241bffbfda1 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -1,6 +1,11 @@ import { AZTEC_SLOT_DURATION } from '@aztec/circuits.js'; import { NULL_KEY } from '@aztec/ethereum'; -import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; +import { + type ConfigMappingsType, + booleanConfigHelper, + getConfigFromMappings, + numberConfigHelper, +} from '@aztec/foundation/config'; /** * The Validator Configuration diff --git 
a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 3d8577243e9..6da7e816b76 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -1,10 +1,10 @@ import { BlockAttestation, BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; +import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type ValidatorKeyStore } from '../key_store/interface.js'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; -import { keccak256 } from '@aztec/foundation/crypto'; export class ValidationService { constructor(private keyStore: ValidatorKeyStore) {} diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 9aac5c2c362..79df6c7e25a 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -26,7 +26,6 @@ export interface Validator { /** Validator Client */ export class ValidatorClient implements Validator { - private validationService: ValidationService; constructor( @@ -34,7 +33,8 @@ export class ValidatorClient implements Validator { private p2pClient: P2P, private attestationPoolingIntervalMs: number, private attestationWaitTimeoutMs: number, - private log = createDebugLogger('aztec:validator')) { + private log = createDebugLogger('aztec:validator'), + ) { //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 this.validationService = new ValidationService(keyStore); @@ -48,7 +48,7 @@ export class ValidatorClient implements Validator { localKeyStore, p2pClient, config.attestationPoolingIntervalMs, - config.attestationWaitTimeoutMs + config.attestationWaitTimeoutMs, ); validator.registerBlockProposalHandler(); return validator; @@ -94,27 +94,30 @@ export class ValidatorClient implements Validator { // Only in an if statement here for logging purposes if (!haveAllTxs) { - const missingTxs: TxHash[] = txHashes.map((_, index) => { - if (!transactionStatuses[index]) { - return txHashes[index]; - } - return undefined; - }).filter(tx => tx !== undefined) as TxHash[]; - - this.log.verbose(`Missing ${missingTxs.length} attestations transactions in the tx pool, requesting from the network`); + const missingTxs: TxHash[] = txHashes + .map((_, index) => { + if (!transactionStatuses[index]) { + return txHashes[index]; + } + return undefined; + }) + .filter(tx => tx !== undefined) as TxHash[]; + + this.log.verbose( + `Missing ${missingTxs.length} attestations transactions in the tx pool, requesting from the network`, + ); if (missingTxs) { // If transactions are requested successfully, they will be written into the tx pool const requestedTxs = await this.p2pClient.requestTxs(missingTxs); const successfullyRetrievedMissingTxs = requestedTxs.every(tx => tx !== undefined); if (!successfullyRetrievedMissingTxs) { - throw new Error("Failed to retrieve missing transactions"); + throw new Error('Failed to retrieve missing transactions'); } } } } - createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { return this.validationService.createBlockProposal(header, archive, txs); } @@ -150,7 +153,9 @@ export class ValidatorClient implements Validator { // 
Rememebr we can subtract 1 from this if we self sign if (attestations.length < numberOfRequiredAttestations) { - this.log.verbose(`SEAN: collected ${attestations.length} attestations so far ${numberOfRequiredAttestations} required`); + this.log.verbose( + `SEAN: collected ${attestations.length} attestations so far ${numberOfRequiredAttestations} required`, + ); this.log.verbose(`Waiting ${this.attestationPoolingIntervalMs}ms for more attestations...`); await sleep(this.attestationPoolingIntervalMs); } From a803a94457d78782475fe3ad2cdbefd8d347a984 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 5 Sep 2024 18:50:05 +0000 Subject: [PATCH 013/123] =?UTF-8?q?=F0=9F=AA=BF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/core/sequencer_selection/Leonidas.sol | 30 +++++++------- l1-contracts/test/sparta/Sparta.t.sol | 39 +++---------------- .../end-to-end/src/e2e_p2p_network.test.ts | 34 +++++++++------- yarn-project/p2p/src/client/p2p_client.ts | 2 - .../src/publisher/l1-publisher.ts | 6 +-- .../src/sequencer/sequencer.test.ts | 1 - .../src/sequencer/sequencer.ts | 14 +------ .../validator-client/src/validator.ts | 5 --- 8 files changed, 44 insertions(+), 87 deletions(-) diff --git a/l1-contracts/src/core/sequencer_selection/Leonidas.sol b/l1-contracts/src/core/sequencer_selection/Leonidas.sol index 1a7d3200a27..ad9550c794f 100644 --- a/l1-contracts/src/core/sequencer_selection/Leonidas.sol +++ b/l1-contracts/src/core/sequencer_selection/Leonidas.sol @@ -128,7 +128,7 @@ contract Leonidas is Ownable, ILeonidas { return epochs[_epoch].committee; } - function getCommitteeAt(uint256 _ts) public view returns (address[] memory) { + function getCommitteeAt(uint256 _ts) internal view returns (address[] memory) { uint256 epochNumber = getEpochAt(_ts); Epoch storage epoch = epochs[epochNumber]; @@ -293,23 +293,23 @@ contract Leonidas is Ownable, ILeonidas { return address(0); } - // Epoch storage epoch = epochs[epochNumber]; + Epoch storage epoch = epochs[epochNumber]; - // // If the epoch is setup, we can just return the proposer. Otherwise we have to emulate sampling - // if (epoch.sampleSeed != 0) { - // uint256 committeeSize = epoch.committee.length; - // if (committeeSize == 0) { - // return address(0); - // } + // If the epoch is setup, we can just return the proposer. 
Otherwise we have to emulate sampling + if (epoch.sampleSeed != 0) { + uint256 committeeSize = epoch.committee.length; + if (committeeSize == 0) { + return address(0); + } - // return - // epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; - // } + return + epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; + } - // // Allow anyone if there is no validator set - // if (validatorSet.length() == 0) { - // return address(0); - // } + // Allow anyone if there is no validator set + if (validatorSet.length() == 0) { + return address(0); + } // Emulate a sampling of the validators uint256 sampleSeed = _getSampleSeed(epochNumber); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index a915b3996c8..a6f802dc102 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -26,8 +26,6 @@ import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; * The tests in this file is testing the sequencer selection */ -import "forge-std/console.sol"; - contract SpartaTest is DecoderBase { using MessageHashUtils for bytes32; @@ -186,14 +184,18 @@ contract SpartaTest is DecoderBase { rollup.setupEpoch(); + // TODO: include these in the base block and include in the load function + bytes32[] memory txHashes = new bytes32[](0); + if (_signatureCount > 0 && ree.proposer != address(0)) { address[] memory validators = rollup.getEpochCommittee(rollup.getCurrentEpoch()); ree.needed = validators.length * 2 / 3 + 1; SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](_signatureCount); + bytes32 digest = keccak256(abi.encodePacked(archive, txHashes)); for (uint256 i = 0; i < _signatureCount; i++) { - signatures[i] = createSignature(validators[i], archive); + signatures[i] = createSignature(validators[i], digest); } if (_expectRevert) { @@ -222,9 +224,6 @@ contract SpartaTest is DecoderBase { ree.shouldRevert = true; } - // TODO(md): this is temp and probably will not pass - bytes32[] memory txHashes = new bytes32[](0); - vm.prank(ree.proposer); rollup.propose(header, archive, bytes32(0), txHashes, signatures); @@ -278,34 +277,6 @@ contract SpartaTest is DecoderBase { assertEq(rollup.archive(), archive, "Invalid archive"); } - - // We need to make sure that the get committee function is actually - // working the way we expect it to - // This is blowing up the test that we want to test - function testGetCommitteeAtNonSetupEpoch() public setup(4) { - if (Constants.IS_DEV_NET == 1) { - return; - } - - uint256 _epochsToJump = 1; - uint256 pre = rollup.getCurrentEpoch(); - vm.warp( - block.timestamp + uint256(_epochsToJump) * rollup.EPOCH_DURATION() * rollup.SLOT_DURATION() - ); - uint256 post = rollup.getCurrentEpoch(); - assertEq(pre + _epochsToJump, post, "Invalid epoch"); - - // Test that the committee returned from getCommitteeAt is the same as the one returned from getCurrentEpochCommittee - address[] memory committeeAtNow = rollup.getCommitteeAt(block.timestamp); - address[] memory currentCommittee = rollup.getCurrentEpochCommittee(); - assertEq(currentCommittee.length, 4, "Committee should be empty"); - assertEq(committeeAtNow.length, currentCommittee.length, "Committee now and get committee should be the same length"); - - for (uint256 i = 0; i < currentCommittee.length; i++) { - assertEq(currentCommittee[i], committeeAtNow[i], "Committee now and get committee should be the same length"); - } - } - function 
_populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { for (uint256 i = 0; i < _contents.length; i++) { vm.prank(_sender); diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 3f9908d9a64..512538585b1 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -164,22 +164,27 @@ describe('e2e_p2p_network', () => { await stopNodes(bootstrapNode, nodes); }); + // NOTE: If this test fails in a PR where the shuffling algorithm is changed, then it is failing as the node with + // the mocked p2p layer is being picked as the sequencer, and it does not have any transactions in it's mempool. + // If this is the case, then we should update the test to switch off the mempool of a different node. + // adjust `nodeToTurnOffTxGossip` in the test below. it('should produce an attestation by requesting tx data over the p2p network', async () => { - // Birds eye overview of the test - // We spin up x nodes - // We turn off receiving a tx via gossip from one of the nodes - // We send a transaction and gossip it to other nodes - // This node will receive an attestation that it does not have the data for - // It will request this data over the p2p layer - // We receive all of the attestations that we need and we produce the block + /** + * Birds eye overview of the test + * 1. We spin up x nodes + * 2. We turn off receiving a tx via gossip from one of the nodes + * 3. We send a transaction and gossip it to other nodes + * 4. This node will receive an attestation that it does not have the data for + * 5. It will request this data over the p2p layer + * 6. We receive all of the attestations that we need and we produce the block + * + * Note: we do not attempt to let this node produce a block, as it will not have received any transactions + * from the other pxes. + */ if (!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); } - // create our network of nodes and submit txs into each of them - // the number of txs per node and the number of txs per rollup - // should be set so that the only way for rollups to be built - // is if the txs are successfully gossiped around the nodes. const contexts: NodeContext[] = []; const nodes: AztecNodeService[] = await createNodes( config, @@ -193,6 +198,7 @@ describe('e2e_p2p_network', () => { await sleep(4000); // Replace the p2p node implementation of one of the nodes with a spy such that it does not store transactions that are gossiped to it + // Original implementation of `processTxFromPeer` will store received transactions in the tx pool. const nodeToTurnOffTxGossip = 0; jest .spyOn((nodes[nodeToTurnOffTxGossip] as any).p2pClient.p2pService, 'processTxFromPeer') @@ -200,11 +206,9 @@ describe('e2e_p2p_network', () => { return Promise.resolve(); }); - // In this shuffle, the node that we turned off receipt through gossiping with will be the third to create a block - // And it will not produce a rollup as nothing exists within it's tx pool. - // So we only send transactions to the first two nodes + // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. + // If the shuffling algorithm changes, then this will need to be updated. 
for (let i = 0; i < 2; i++) { - // The node which we disabled receiving from gossip from will not have any transactions in it's mempool const context = await createPXEServiceAndSubmitTransactions(nodes[i], NUM_TXS_PER_NODE); contexts.push(context); } diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index a16f87b394d..4b079c9d5b3 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -293,8 +293,6 @@ export class P2PClient implements P2P { // TODO: this will need a timeout + retry mechanism to make sure that we can retreive things on time public requestTxs(txHashes: TxHash[]): Promise<(Tx | undefined)[]> { - // TODO: adjust this - what if one peer can service all of the requests? then we do not need to send all of these at once and overload the peer - // have a thinky const requestPromises = txHashes.map(txHash => this.requestTxByHash(txHash)); return Promise.all(requestPromises); } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 8fbb6cb7445..d1a59a138af 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -264,7 +264,7 @@ export class L1Publisher { blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), attestations, - txHashes: txHashes ?? [], // NTS(md): should be 32 bytes? + txHashes: txHashes ?? [], }; // Publish body and propose block (if not already published) @@ -503,6 +503,7 @@ export class L1Publisher { ] as const; // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead + // See comment attached to static SKIP_SIMULATION definition if (!L1Publisher.SKIP_SIMULATION) { const simulationResult = await this.rollupContract.simulate.proposeWithBody(args, { account: this.account, @@ -521,19 +522,18 @@ export class L1Publisher { ] as const; // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead + // See comment attached to static SKIP_SIMULATION definition if (!L1Publisher.SKIP_SIMULATION) { await this.rollupContract.simulate.propose(args, { account: this.account, }); } - // TODO(md): first tx is failing in here return await this.rollupContract.write.propose(args, { account: this.account, }); } } catch (err) { - // TODO: tidy up this error if (err instanceof BaseError) { const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); if (revertError instanceof ContractFunctionRevertedError) { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 4afc7a35a64..73b441a58eb 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -75,7 +75,6 @@ describe('sequencer', () => { const committee = [EthAddress.random()]; const getSignatures = () => [mockedSig]; const getAttestations = () => { - // TODO(md): might be errors in here const attestation = new BlockAttestation(block.header, archive, [], mockedSig); (attestation as any).sender = committee[0]; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 77779a1166a..f57720ea370 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ 
b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -520,12 +520,10 @@ export class Sequencer { protected async collectAttestations(block: L2Block, txHashes: TxHash[]): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962): inefficient to have a round trip in here - this should be cached const committee = await this.publisher.getCurrentEpochCommittee(); - this.log.verbose(`ATTEST | committee length ${committee.length}`); + this.log.debug(`Attesting committee length ${committee.length}`); if (committee.length === 0) { - this.log.verbose(`ATTEST | committee length is 0, skipping`); - // TODO(md): remove error - throw new Error('Committee length is 0, WHAT'); + this.log.debug(`Attesting committee length is 0, skipping`); return undefined; } @@ -547,18 +545,10 @@ export class Sequencer { this.log.verbose('Broadcasting block proposal to validators'); this.validatorClient.broadcastBlockProposal(proposal); - // Note do we know if it is wating for attestations as it thinks it should be - // proposing the block? - this.state = SequencerState.WAITING_FOR_ATTESTATIONS; const attestations = await this.validatorClient.collectAttestations(proposal, numberOfRequiredAttestations); this.log.verbose(`Collected attestations from validators, number of attestations: ${attestations.length}`); - // NOTE: is the self report now done in the method above - // // TODO: clean: SELF REPORT LMAO - // const selfSign = await this.validatorClient.attestToProposal(proposal); - // attestations.push(selfSign); - // note: the smart contract requires that the signatures are provided in the order of the committee return await orderAttestations(attestations, committee); } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 79df6c7e25a..838654160ca 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -126,9 +126,7 @@ export class ValidatorClient implements Validator { this.p2pClient.broadcastProposal(proposal); } - // Target is temporarily hardcoded, for a test, but will be calculated from smart contract // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962) - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7976): require suitable timeouts async collectAttestations( proposal: BlockProposal, numberOfRequiredAttestations: number, @@ -140,9 +138,6 @@ export class ValidatorClient implements Validator { const slot = proposal.header.globalVariables.slotNumber.toBigInt(); - // TODO: tidy - numberOfRequiredAttestations -= 1; // We self sign - this.log.info(`Waiting for ${numberOfRequiredAttestations} attestations for slot: ${slot}`); const myAttestation = await this.attestToProposal(proposal); From 164c117e396d83fa4210db2a0715555fa0395416 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:36:00 +0000 Subject: [PATCH 014/123] chore: validator tests --- .../end-to-end/src/e2e_p2p_network.test.ts | 25 ++++--- yarn-project/p2p/src/errors/index.ts | 1 + yarn-project/p2p/src/errors/reqresp.error.ts | 14 ++++ .../p2p/src/service/reqresp/interface.ts | 6 +- yarn-project/validator-client/package.json | 1 + .../validator-client/src/errors/index.ts | 1 + .../src/errors/validator.error.ts | 19 +++++ .../validator-client/src/validator.test.ts | 71 ++++++++++++++++++ .../validator-client/src/validator.ts | 75 +++++++------------ yarn-project/yarn.lock | 3 +- 10 files changed, 154 insertions(+), 62 
deletions(-)
 create mode 100644 yarn-project/p2p/src/errors/index.ts
 create mode 100644 yarn-project/p2p/src/errors/reqresp.error.ts
 create mode 100644 yarn-project/validator-client/src/errors/index.ts
 create mode 100644 yarn-project/validator-client/src/errors/validator.error.ts
 create mode 100644 yarn-project/validator-client/src/validator.test.ts

diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts
index 512538585b1..c46adb23f26 100644
--- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts
+++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts
@@ -172,10 +172,10 @@ describe('e2e_p2p_network', () => {
     /**
      * Birds eye overview of the test
      * 1. We spin up x nodes
-     * 2. We turn off receiving a tx via gossip from one of the nodes
-     * 3. We send a transaction and gossip it to other nodes
-     * 4. This node will receive an attestation that it does not have the data for
-     * 5. It will request this data over the p2p layer
+     * 2. We turn off receiving a tx via gossip from two of the nodes
+     * 3. We send transactions and gossip them to other nodes
+     * 4. The disabled nodes will receive an attestation that they do not have the data for
+     * 5. They will request this data over the p2p layer
      * 6. We receive all of the attestations that we need and we produce the block
      *
      * Note: we do not attempt to let this node produce a block, as it will not have received any transactions
@@ -197,14 +197,17 @@ describe('e2e_p2p_network', () => {
     // wait a bit for peers to discover each other
     await sleep(4000);

-    // Replace the p2p node implementation of one of the nodes with a spy such that it does not store transactions that are gossiped to it
+    // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it
     // Original implementation of `processTxFromPeer` will store received transactions in the tx pool.
-    const nodeToTurnOffTxGossip = 0;
-    jest
-      .spyOn((nodes[nodeToTurnOffTxGossip] as any).p2pClient.p2pService, 'processTxFromPeer')
-      .mockImplementation((): Promise<void> => {
-        return Promise.resolve();
-      });
+    // We have chosen nodes 0 and 2 as they do not get chosen to be the sequencer in this test (node 1 does).
+    const nodeToTurnOffTxGossip = [0, 2];
+    for (const nodeIndex of nodeToTurnOffTxGossip) {
+      jest
+        .spyOn((nodes[nodeIndex] as any).p2pClient.p2pService, 'processTxFromPeer')
+        .mockImplementation((): Promise<void> => {
+          return Promise.resolve();
+        });
+    }

     // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block.
     // If the shuffling algorithm changes, then this will need to be updated.
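For context, the behaviour this test exercises is the validator's missing-transaction recovery path (the ensureTransactionsAreAvailable flow added to the validator client later in this patch): check the local tx pool for every tx hash in the proposal, then fall back to the p2p request/response layer for anything that is missing. Below is a minimal sketch of that flow; the helper name and loosened types are illustrative stand-ins, not the actual @aztec interfaces.

type TxHash = string;
interface Tx { hash: TxHash; }

// Assumed shape of the p2p client for this sketch; the real P2P interface in this
// series exposes getTxStatus and requestTxs with richer types.
interface P2PLike {
  getTxStatus(hash: TxHash): 'pending' | 'mined' | undefined;
  requestTxs(hashes: TxHash[]): Promise<(Tx | undefined)[]>;
}

async function ensureTxsAvailable(p2p: P2PLike, txHashes: TxHash[]): Promise<void> {
  // Anything not already pending or mined in the local pool is considered missing.
  const missing = txHashes.filter(hash => p2p.getTxStatus(hash) === undefined);
  if (missing.length === 0) {
    return; // all txs referenced by the proposal are already in the tx pool
  }
  // Ask peers over the req/resp sub-protocol; successful responses are added to the pool.
  const fetched = await p2p.requestTxs(missing);
  if (fetched.some(tx => tx === undefined)) {
    throw new Error(`could not retrieve txs: ${missing.join(', ')}`);
  }
}

The test forces exactly this path by dropping gossiped txs on nodes 0 and 2, so the only way those validators can attest is by fetching the tx data over req/resp.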
diff --git a/yarn-project/p2p/src/errors/index.ts b/yarn-project/p2p/src/errors/index.ts new file mode 100644 index 00000000000..01b77d2a6a2 --- /dev/null +++ b/yarn-project/p2p/src/errors/index.ts @@ -0,0 +1 @@ +export * from './reqresp.error.js'; diff --git a/yarn-project/p2p/src/errors/reqresp.error.ts b/yarn-project/p2p/src/errors/reqresp.error.ts new file mode 100644 index 00000000000..1349205cd74 --- /dev/null +++ b/yarn-project/p2p/src/errors/reqresp.error.ts @@ -0,0 +1,14 @@ +import { type ReqRespSubProtocol, TX_REQ_PROTOCOL } from '../service/reqresp/interface.js'; + +export class ReqRespError extends Error { + constructor(protocol: ReqRespSubProtocol, message: string) { + super(message); + } +} + +// TODO(md): think about what these errors should ideally be +export class TxHandlerReqRespError extends ReqRespError { + constructor() { + super(TX_REQ_PROTOCOL, 'Could not perform tx handler request response'); + } +} diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 39f27b6268f..5ae61d0389d 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -3,9 +3,9 @@ import { Tx, TxHash } from '@aztec/circuit-types'; /* * Request Response Sub Protocols */ -export const PING_PROTOCOL = '/aztec/ping/0.1.0'; -export const STATUS_PROTOCOL = '/aztec/status/0.1.0'; -export const TX_REQ_PROTOCOL = '/aztec/tx_req/0.1.0'; +export const PING_PROTOCOL = '/aztec/req/ping/0.1.0'; +export const STATUS_PROTOCOL = '/aztec/req/status/0.1.0'; +export const TX_REQ_PROTOCOL = '/aztec/req/tx/0.1.0'; // Sum type for sub protocols export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | typeof TX_REQ_PROTOCOL; diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index e89785cf9dc..23ca3b59106 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -72,6 +72,7 @@ "@types/jest": "^29.5.0", "@types/node": "^18.7.23", "jest": "^29.5.0", + "jest-mock-extended": "^3.0.7", "ts-node": "^10.9.1", "typescript": "^5.0.4" }, diff --git a/yarn-project/validator-client/src/errors/index.ts b/yarn-project/validator-client/src/errors/index.ts new file mode 100644 index 00000000000..00a5c872c01 --- /dev/null +++ b/yarn-project/validator-client/src/errors/index.ts @@ -0,0 +1 @@ +export * from './validator.error.js'; diff --git a/yarn-project/validator-client/src/errors/validator.error.ts b/yarn-project/validator-client/src/errors/validator.error.ts new file mode 100644 index 00000000000..5cc3929962e --- /dev/null +++ b/yarn-project/validator-client/src/errors/validator.error.ts @@ -0,0 +1,19 @@ +import { type TxHash } from '@aztec/circuit-types/tx_hash'; + +export class ValidatorError extends Error { + constructor(message: string) { + super(message); + } +} + +export class AttestationTimeoutError extends ValidatorError { + constructor(numberOfRequiredAttestations: number, slot: bigint) { + super(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); + } +} + +export class TransactionsNotAvailableError extends ValidatorError { + constructor(txHashes: TxHash[]) { + super(`Transactions not available: ${txHashes.join(', ')}`); + } +} diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts new file mode 100644 index 00000000000..ec238e14077 --- /dev/null +++ 
b/yarn-project/validator-client/src/validator.test.ts
@@ -0,0 +1,71 @@
+/**
+ * Validation logic unit tests
+ */
+import { TxHash } from '@aztec/circuit-types';
+import { makeHeader } from '@aztec/circuits.js/testing';
+import { EthAddress } from '@aztec/foundation/eth-address';
+import { Fr } from '@aztec/foundation/fields';
+import { type P2P } from '@aztec/p2p';
+
+import { describe, expect, it } from '@jest/globals';
+import { type MockProxy, mock } from 'jest-mock-extended';
+import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts';
+
+import { makeBlockProposal } from '../../circuit-types/src/p2p/mocks.js';
+import { AttestationTimeoutError, TransactionsNotAvailableError } from './errors/validator.error.js';
+import { ValidatorClient } from './validator.js';
+
+describe('ValidationService', () => {
+  let validatorClient: ValidatorClient;
+  let p2pClient: MockProxy<P2P>;
+  let validatorAccount: PrivateKeyAccount;
+
+  beforeEach(() => {
+    p2pClient = mock<P2P>();
+    p2pClient.getAttestationsForSlot.mockImplementation(() => Promise.resolve([]));
+
+    const validatorPrivateKey = generatePrivateKey();
+    validatorAccount = privateKeyToAccount(validatorPrivateKey);
+
+    const config = {
+      validatorPrivateKey: validatorPrivateKey,
+      attestationPoolingIntervalMs: 1000,
+      attestationWaitTimeoutMs: 1000,
+      disableValidator: false,
+    };
+    validatorClient = ValidatorClient.new(config, p2pClient);
+  });
+
+  it('Should create a valid block proposal', async () => {
+    const header = makeHeader();
+    const archive = Fr.random();
+    const txs = [1, 2, 3, 4, 5].map(() => TxHash.random());
+
+    const blockProposal = await validatorClient.createBlockProposal(header, archive, txs);
+
+    expect(blockProposal).toBeDefined();
+
+    const validatorAddress = EthAddress.fromString(validatorAccount.address);
+    expect(await blockProposal.getSender()).toEqual(validatorAddress);
+  });
+
+  // TODO: add a test on the sequencer that it can recover in case this timeout is hit
+  it('Should timeout if we do not collect enough attestations in time', async () => {
+    const proposal = await makeBlockProposal();
+
+    await expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError);
+  });
+
+  it('Should throw an error if the transactions are not available', async () => {
+    const proposal = await makeBlockProposal();
+
+    // Mock the p2pClient.getTxStatus to return undefined for all transactions
+    p2pClient.getTxStatus.mockImplementation(() => undefined);
+    // Mock the p2pClient.requestTxs to return undefined for all transactions
+    p2pClient.requestTxs.mockImplementation(() => Promise.resolve([undefined]));
+
+    await expect(validatorClient.ensureTransactionsAreAvailable(proposal)).rejects.toThrow(
+      TransactionsNotAvailableError,
+    );
+  });
+});
diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts
index 838654160ca..a4ccd25f6ba 100644
--- a/yarn-project/validator-client/src/validator.ts
+++ b/yarn-project/validator-client/src/validator.ts
@@ -7,6 +7,7 @@
 import { type ValidatorClientConfig } from './config.js';
 import { ValidationService } from './duties/validation_service.js';
+import { AttestationTimeoutError, TransactionsNotAvailableError } from './errors/validator.error.js';
 import { type ValidatorKeyStore } from './key_store/interface.js';
 import { LocalKeyStore } from './key_store/local_key_store.js';

@@ -18,7 +19,6 @@
createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise; attestToProposal(proposal: BlockProposal): void; - // TODO(md): possible abstraction leak broadcastBlockProposal(proposal: BlockProposal): void; collectAttestations(proposal: BlockProposal, numberOfRequiredAttestations: number): Promise; } @@ -88,33 +88,20 @@ export class ValidatorClient implements Validator { */ async ensureTransactionsAreAvailable(proposal: BlockProposal) { const txHashes: TxHash[] = proposal.txs; + const transactionStatuses = await Promise.all(txHashes.map(txHash => this.p2pClient.getTxStatus(txHash))); - const transactionStatuses = txHashes.map(txHash => this.p2pClient.getTxStatus(txHash)); - const haveAllTxs = transactionStatuses.every(tx => tx === 'pending' || tx === 'mined'); + const missingTxs = txHashes.filter((_, index) => !['pending', 'mined'].includes(transactionStatuses[index] ?? '')); - // Only in an if statement here for logging purposes - if (!haveAllTxs) { - const missingTxs: TxHash[] = txHashes - .map((_, index) => { - if (!transactionStatuses[index]) { - return txHashes[index]; - } - return undefined; - }) - .filter(tx => tx !== undefined) as TxHash[]; + if (missingTxs.length === 0) { + return; // All transactions are available + } - this.log.verbose( - `Missing ${missingTxs.length} attestations transactions in the tx pool, requesting from the network`, - ); + this.log.verbose(`Missing ${missingTxs.length} transactions in the tx pool, requesting from the network`); - if (missingTxs) { - // If transactions are requested successfully, they will be written into the tx pool - const requestedTxs = await this.p2pClient.requestTxs(missingTxs); - const successfullyRetrievedMissingTxs = requestedTxs.every(tx => tx !== undefined); - if (!successfullyRetrievedMissingTxs) { - throw new Error('Failed to retrieve missing transactions'); - } - } + const requestedTxs = await this.p2pClient.requestTxs(missingTxs); + if (requestedTxs.some(tx => tx === undefined)) { + this.log.error(`Failed to request transactions from the network: ${missingTxs.join(', ')}`); + throw new TransactionsNotAvailableError(missingTxs); } } @@ -131,38 +118,32 @@ export class ValidatorClient implements Validator { proposal: BlockProposal, numberOfRequiredAttestations: number, ): Promise { - // Wait and poll the p2pClients attestation pool for this block - // until we have enough attestations - - const startTime = Date.now(); - + // Wait and poll the p2pClient's attestation pool for this block until we have enough attestations const slot = proposal.header.globalVariables.slotNumber.toBigInt(); - this.log.info(`Waiting for ${numberOfRequiredAttestations} attestations for slot: ${slot}`); - const myAttestation = await this.attestToProposal(proposal); + const myAttestation = await this.validationService.attestToProposal(proposal); + + const startTime = Date.now(); - let attestations: BlockAttestation[] = []; - while (attestations.length < numberOfRequiredAttestations) { - attestations = [myAttestation, ...(await this.p2pClient.getAttestationsForSlot(slot))]; + while (true) { + const attestations = [myAttestation, ...(await this.p2pClient.getAttestationsForSlot(slot))]; - // Rememebr we can subtract 1 from this if we self sign - if (attestations.length < numberOfRequiredAttestations) { - this.log.verbose( - `SEAN: collected ${attestations.length} attestations so far ${numberOfRequiredAttestations} required`, - ); - this.log.verbose(`Waiting ${this.attestationPoolingIntervalMs}ms for more attestations...`); - await 
sleep(this.attestationPoolingIntervalMs); + if (attestations.length >= numberOfRequiredAttestations) { + this.log.info(`Collected all ${numberOfRequiredAttestations} attestations for slot, ${slot}`); + return attestations; } - // FIX(md): kinna sad looking code - if (Date.now() - startTime > this.attestationWaitTimeoutMs) { + const elapsedTime = Date.now() - startTime; + if (elapsedTime > this.attestationWaitTimeoutMs) { this.log.error(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); - throw new Error(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); + throw new AttestationTimeoutError(numberOfRequiredAttestations, slot); } - } - this.log.info(`Collected all attestations for slot, ${slot}`); - return attestations; + this.log.verbose( + `Collected ${attestations.length} attestations so far, waiting ${this.attestationPoolingIntervalMs}ms for more...`, + ); + await sleep(this.attestationPoolingIntervalMs); + } } } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index cc8bf108dd1..b91fdb4f6fa 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1219,6 +1219,7 @@ __metadata: "@types/jest": ^29.5.0 "@types/node": ^18.7.23 jest: ^29.5.0 + jest-mock-extended: ^3.0.7 koa: ^2.14.2 koa-router: ^12.0.0 ts-node: ^10.9.1 @@ -10802,7 +10803,7 @@ __metadata: languageName: node linkType: hard -"jest-mock-extended@npm:^3.0.3, jest-mock-extended@npm:^3.0.4, jest-mock-extended@npm:^3.0.5": +"jest-mock-extended@npm:^3.0.3, jest-mock-extended@npm:^3.0.4, jest-mock-extended@npm:^3.0.5, jest-mock-extended@npm:^3.0.7": version: 3.0.7 resolution: "jest-mock-extended@npm:3.0.7" dependencies: From 27da59d27d7c6637a5c8916a8c30d9745c33da3e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:40:04 +0000 Subject: [PATCH 015/123] Add timeouts to individual reqresp connections --- .../composed/integration_l1_publisher.test.ts | 4 +- yarn-project/foundation/src/timer/index.ts | 2 +- yarn-project/foundation/src/timer/timeout.ts | 11 ++++- yarn-project/p2p/src/errors/reqresp.error.ts | 25 +++++++---- .../p2p/src/service/reqresp/reqresp.test.ts | 22 +++++++++- .../p2p/src/service/reqresp/reqresp.ts | 34 +++++++++++--- .../src/publisher/l1-publisher.test.ts | 22 +++++----- .../src/publisher/l1-publisher.ts | 7 ++- .../src/sequencer/sequencer.test.ts | 44 +++++++++---------- .../src/sequencer/sequencer.ts | 4 +- 10 files changed, 116 insertions(+), 59 deletions(-) diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index cd097c23658..018b1cef1be 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -408,7 +408,7 @@ describe('L1Publisher integration', () => { writeJson(`mixed_block_${block.number}`, block, l1ToL2Content, recipientAddress, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.proposeL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -508,7 +508,7 @@ describe('L1Publisher integration', () => { writeJson(`empty_block_${block.number}`, block, [], AztecAddress.ZERO, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.proposeL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, diff --git 
a/yarn-project/foundation/src/timer/index.ts b/yarn-project/foundation/src/timer/index.ts index a883953799d..972248c8f41 100644 --- a/yarn-project/foundation/src/timer/index.ts +++ b/yarn-project/foundation/src/timer/index.ts @@ -1,3 +1,3 @@ -export { TimeoutTask } from './timeout.js'; +export { TimeoutTask, executeTimeoutWithCustomError } from './timeout.js'; export { Timer } from './timer.js'; export { elapsed, elapsedSync } from './elapsed.js'; diff --git a/yarn-project/foundation/src/timer/timeout.ts b/yarn-project/foundation/src/timer/timeout.ts index 08bc4ea220e..a498dbf8cdd 100644 --- a/yarn-project/foundation/src/timer/timeout.ts +++ b/yarn-project/foundation/src/timer/timeout.ts @@ -10,9 +10,9 @@ export class TimeoutTask { private interrupt = () => {}; private totalTime = 0; - constructor(private fn: () => Promise, private timeout = 0, fnName = '') { + constructor(private fn: () => Promise, private timeout = 0, fnName = '', error = () => new Error(`Timeout${fnName ? ` running ${fnName}` : ''} after ${timeout}ms.`)) { this.interruptPromise = new Promise((_, reject) => { - this.interrupt = () => reject(new Error(`Timeout${fnName ? ` running ${fnName}` : ''} after ${timeout}ms.`)); + this.interrupt = () => reject(error()); }); } @@ -28,7 +28,9 @@ export class TimeoutTask { const interruptTimeout = !this.timeout ? 0 : setTimeout(this.interrupt, this.timeout); try { const start = Date.now(); + console.log("before race"); const result = await Promise.race([this.fn(), this.interruptPromise]); + console.log("after race", result); this.totalTime = Date.now() - start; return result; } finally { @@ -62,3 +64,8 @@ export const executeTimeout = async (fn: () => Promise, timeout = 0, fnNam const task = new TimeoutTask(fn, timeout, fnName); return await task.exec(); }; + +export const executeTimeoutWithCustomError = async (fn: () => Promise, timeout = 0, error = () => new Error("No custom error provided"), fnName = '') => { + const task = new TimeoutTask(fn, timeout, fnName, error); + return await task.exec(); +}; diff --git a/yarn-project/p2p/src/errors/reqresp.error.ts b/yarn-project/p2p/src/errors/reqresp.error.ts index 1349205cd74..f70721efc5e 100644 --- a/yarn-project/p2p/src/errors/reqresp.error.ts +++ b/yarn-project/p2p/src/errors/reqresp.error.ts @@ -1,14 +1,21 @@ -import { type ReqRespSubProtocol, TX_REQ_PROTOCOL } from '../service/reqresp/interface.js'; - -export class ReqRespError extends Error { - constructor(protocol: ReqRespSubProtocol, message: string) { - super(message); +/** Individual request timeout error + * + * This error will be thrown when a request to a specific peer times out. + * @category Errors + */ +export class IndiviualReqRespTimeoutError extends Error { + constructor() { + super(`Request to peer timed out`); } } -// TODO(md): think about what these errors should ideally be -export class TxHandlerReqRespError extends ReqRespError { +/** Collective request timeout error + * + * This error will be thrown when a req resp request times out regardless of the peer. 
+ * @category Errors + */ +export class CollectiveReqRespTimeoutError extends Error { constructor() { - super(TX_REQ_PROTOCOL, 'Could not perform tx handler request response'); + super(`Request to all peers timed out`); } -} +} \ No newline at end of file diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index de9d931bd12..b1c6ebdaab1 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -1,7 +1,7 @@ import { TxHash, mockTx } from '@aztec/circuit-types'; import { sleep } from '@aztec/foundation/sleep'; -import { describe, expect, it } from '@jest/globals'; +import { describe, expect, it, jest } from '@jest/globals'; import { MOCK_SUB_PROTOCOL_HANDLERS, connectToPeers, createNodes, startNodes, stopNodes } from '../../mocks/index.js'; import { PING_PROTOCOL, TX_REQ_PROTOCOL } from './interface.js'; @@ -120,5 +120,25 @@ describe('ReqResp', () => { await stopNodes(nodes); }); + + it('Should timeout if nothing is returned over the stream', async () => { + const nodes = await createNodes(2); + + await startNodes(nodes); + + console.log((nodes[1].req as any).subProtocolHandlers[TX_REQ_PROTOCOL]) + jest.spyOn((nodes[1].req as any).subProtocolHandlers, TX_REQ_PROTOCOL).mockImplementation(() => { + return new Promise(() => {}); + }); + + await sleep(500); + await connectToPeers(nodes); + await sleep(500); + + const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, Buffer.from('ping')); + expect(res).toBeUndefined(); + + await stopNodes(nodes); + }); }); }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index dada63424c2..496e2b420d4 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -1,7 +1,7 @@ // @attribution: lodestar impl for inspiration import { type Logger, createDebugLogger } from '@aztec/foundation/log'; -import { type IncomingStreamData, type PeerId } from '@libp2p/interface'; +import { Stream, type IncomingStreamData, type PeerId } from '@libp2p/interface'; import { pipe } from 'it-pipe'; import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; @@ -11,6 +11,8 @@ import { type ReqRespSubProtocol, type ReqRespSubProtocolHandlers, } from './interface.js'; +import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; +import { IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; /** * The Request Response Service @@ -28,6 +30,10 @@ export class ReqResp { private abortController: AbortController = new AbortController(); + // TODO: change defaults and add to configuration + private overallRequestTimeoutMs: number = 4000; + private individualRequestTimeoutMs: number = 2000; + private subProtocolHandlers: ReqRespSubProtocolHandlers = DEFAULT_SUB_PROTOCOL_HANDLERS; constructor(protected readonly libp2p: Libp2p) { @@ -71,6 +77,7 @@ export class ReqResp { // Attempt to ask all of our peers for (const peer of peers) { const response = await this.sendRequestToPeer(peer, subProtocol, payload); + console.log("In send request response", response); // If we get a response, return it, otherwise we iterate onto the next peer // We do not consider it a success if we have an empty buffer @@ -94,18 +101,35 @@ export class ReqResp { subProtocol: ReqRespSubProtocol, payload: Buffer, ): Promise { + let stream: Stream | undefined; try { - const stream = await 
this.libp2p.dialProtocol(peerId, subProtocol); + stream = await this.libp2p.dialProtocol(peerId, subProtocol); + this.logger.debug(`Stream opened with ${peerId.publicKey} for ${subProtocol}`); - const result = await pipe([payload], stream, this.readMessage); + const result = await executeTimeoutWithCustomError( + (): Promise => pipe([payload], stream!, this.readMessage), + this.overallRequestTimeoutMs, + () => new IndiviualReqRespTimeoutError(), + ); + console.log("after timeout check", result); await stream.close(); this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); + console.log("after stream close", result); return result; - } catch (e) { - this.logger.warn(`Failed to send request to peer ${peerId.publicKey}`); + } catch (e: any) { + this.logger.error(`${e.message} | peer: ${peerId.publicKey?.toString()} | subProtocol: ${subProtocol}`); + } finally { + if (stream) { + try { + await stream.close(); + this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); + } catch (closeError) { + this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`); + } + } return undefined; } } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 82f466b1151..f1a4825220c 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -155,7 +155,7 @@ describe('L1Publisher', () => { rollupContractSimulate.propose.mockResolvedValueOnce(proposeTxHash); publicClient.getTransactionReceipt.mockResolvedValueOnce(proposeTxReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(true); @@ -179,7 +179,7 @@ describe('L1Publisher', () => { rollupContractSimulate.propose.mockResolvedValueOnce(processTxHash); publicClient.getTransactionReceipt.mockResolvedValueOnce(processTxReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(true); const args = [ @@ -206,7 +206,7 @@ describe('L1Publisher', () => { .mockResolvedValueOnce(processTxHash as `0x${string}`) .mockResolvedValueOnce(processTxHash as `0x${string}`); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); @@ -216,7 +216,7 @@ describe('L1Publisher', () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractRead.validateHeader.mockRejectedValueOnce(new Error('Test error')); - await expect(publisher.processL2Block(l2Block)).rejects.toThrow(); + await expect(publisher.proposeL2Block(l2Block)).rejects.toThrow(); expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); expect(rollupContractWrite.propose).toHaveBeenCalledTimes(0); @@ -227,7 +227,7 @@ describe('L1Publisher', () => { rollupContractSimulate.propose.mockResolvedValueOnce(proposeTxHash as `0x${string}`); rollupContractWrite.propose.mockRejectedValueOnce(new Error()); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); @@ -240,7 +240,7 @@ 
describe('L1Publisher', () => { rollupContractWrite.propose.mockResolvedValueOnce(processTxHash); publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(processTxReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(true); expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); @@ -252,7 +252,7 @@ describe('L1Publisher', () => { rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash as `0x${string}`); publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(true); expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); @@ -263,7 +263,7 @@ describe('L1Publisher', () => { rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); }); @@ -274,7 +274,7 @@ describe('L1Publisher', () => { publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...processTxReceipt, status: 'reverted' }); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); }); @@ -283,7 +283,7 @@ describe('L1Publisher', () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, proposeTxHash) as Promise<`0x${string}`>); - const resultPromise = publisher.processL2Block(l2Block); + const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; @@ -296,7 +296,7 @@ describe('L1Publisher', () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, processTxHash) as Promise<`0x${string}`>); - const resultPromise = publisher.processL2Block(l2Block); + const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d1a59a138af..88bde93945a 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -244,12 +244,11 @@ export class L1Publisher { } /** - * Publishes L2 block on L1. - * @param block - L2 block to publish. + * Proposes a L2 block on L1. + * @param block - L2 block to propose. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. 
*/ - // TODO: rename propose - public async processL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { + public async proposeL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { const ctx = { blockNumber: block.number, slotNumber: block.header.globalVariables.slotNumber.toBigInt(), diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 73b441a58eb..deb6fb7b9a3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -191,7 +191,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValueOnce([tx]); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -204,8 +204,8 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); // Ok, we have an issue that we never actually call the process L2 block - expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -222,7 +222,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValue([tx]); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); @@ -253,7 +253,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -273,7 +273,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValueOnce(txs); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -293,7 +293,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -314,7 +314,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValueOnce(txs); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); 
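The rename above also pins down the contract implied by the new name: proposeL2Block blocks until the L1 propose transaction is mined, then resolves to true on success and false on revert or interrupt. A minimal sketch of how a caller can act on that boolean, loosely modelled on the sequencer change later in this series; the ProposingPublisher interface and publishOrSkip helper below are illustrative stand-ins, not part of the patch.

// Illustrative stand-in typing so the sketch compiles on its own; the real
// L1Publisher exposes a richer interface (attestations, tx hashes, metrics, ...).
interface ProposingPublisher {
  proposeL2Block(block: { number: number }): Promise<boolean>;
}

// Returns the block number that landed on L1, or undefined when the propose tx
// reverted or the publisher was interrupted (mirrors how lastPublishedBlock is kept).
async function publishOrSkip(publisher: ProposingPublisher, block: { number: number }): Promise<number | undefined> {
  const published = await publisher.proposeL2Block(block); // blocks until the tx is mined
  return published ? block.number : undefined;
}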
globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -329,7 +329,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -349,7 +349,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValueOnce(txs); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -365,7 +365,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -385,7 +385,7 @@ describe('sequencer', () => { blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -412,8 +412,8 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -433,7 +433,7 @@ describe('sequencer', () => { blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -463,8 +463,8 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -484,7 +484,7 @@ describe('sequencer', () => { blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -514,9 +514,9 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - 
expect(publisher.processL2Block).toHaveBeenCalledTimes(1); + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -533,7 +533,7 @@ describe('sequencer', () => { p2p.getTxs.mockReturnValueOnce([tx]); blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); blockSimulator.finaliseBlock.mockResolvedValue({ block }); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.proposeL2Block.mockResolvedValueOnce(true); const mockedGlobalVariables = new GlobalVariables( chainId, @@ -558,7 +558,7 @@ describe('sequencer', () => { await sequencer.work(); - expect(publisher.processL2Block).not.toHaveBeenCalled(); + expect(publisher.proposeL2Block).not.toHaveBeenCalled(); }); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index f57720ea370..90821e7ad6b 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -538,7 +538,7 @@ export class Sequencer { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7974): we do not have transaction[] lists in the block for now // Dont do anything with the proposals for now - just collect them - this.log.verbose('ATTEST | Creating block proposal'); + this.log.verbose('Creating block proposal'); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); this.state = SequencerState.PUBLISHING_BLOCK_TO_PEERS; @@ -564,7 +564,7 @@ export class Sequencer { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await this.publisher.processL2Block(block, attestations, txHashes); + const publishedL2Block = await this.publisher.proposeL2Block(block, attestations, txHashes); if (publishedL2Block) { this.lastPublishedBlock = block.number; } else { From 9e7d2d8e19a3c17c4915c988085a3822e1435110 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:48:04 +0000 Subject: [PATCH 016/123] fix --- yarn-project/foundation/src/timer/timeout.ts | 2 - .../p2p/src/service/reqresp/reqresp.test.ts | 18 +++++++- .../p2p/src/service/reqresp/reqresp.ts | 43 +++++++++++-------- 3 files changed, 40 insertions(+), 23 deletions(-) diff --git a/yarn-project/foundation/src/timer/timeout.ts b/yarn-project/foundation/src/timer/timeout.ts index a498dbf8cdd..8f7ee422a7d 100644 --- a/yarn-project/foundation/src/timer/timeout.ts +++ b/yarn-project/foundation/src/timer/timeout.ts @@ -28,9 +28,7 @@ export class TimeoutTask { const interruptTimeout = !this.timeout ? 
0 : setTimeout(this.interrupt, this.timeout); try { const start = Date.now(); - console.log("before race"); const result = await Promise.race([this.fn(), this.interruptPromise]); - console.log("after race", result); this.totalTime = Date.now() - start; return result; } finally { diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index b1c6ebdaab1..348bfa07128 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -121,12 +121,11 @@ describe('ReqResp', () => { await stopNodes(nodes); }); - it('Should timeout if nothing is returned over the stream', async () => { + it('Should hit individual timeout if nothing is returned over the stream', async () => { const nodes = await createNodes(2); await startNodes(nodes); - console.log((nodes[1].req as any).subProtocolHandlers[TX_REQ_PROTOCOL]) jest.spyOn((nodes[1].req as any).subProtocolHandlers, TX_REQ_PROTOCOL).mockImplementation(() => { return new Promise(() => {}); }); @@ -140,5 +139,20 @@ describe('ReqResp', () => { await stopNodes(nodes); }); + + // TODO: complete this test + it("Should hit collective timeout if nothing is returned over the stream from multiple peers", async () => { + const nodes = await createNodes(3); + + await startNodes(nodes); + + jest.spyOn((nodes[1].req as any).subProtocolHandlers, TX_REQ_PROTOCOL).mockImplementation(() => { + return new Promise(() => {}); + }); + + }); + }); + + }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 496e2b420d4..a35a4f9a746 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -1,7 +1,7 @@ // @attribution: lodestar impl for inspiration import { type Logger, createDebugLogger } from '@aztec/foundation/log'; -import { Stream, type IncomingStreamData, type PeerId } from '@libp2p/interface'; +import { type Stream, type IncomingStreamData, type PeerId } from '@libp2p/interface'; import { pipe } from 'it-pipe'; import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; @@ -12,7 +12,7 @@ import { type ReqRespSubProtocolHandlers, } from './interface.js'; import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; -import { IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; +import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; /** * The Request Response Service @@ -71,21 +71,28 @@ export class ReqResp { * @returns - The response from the peer, otherwise undefined */ async sendRequest(subProtocol: ReqRespSubProtocol, payload: Buffer): Promise { - // Get active peers - const peers = this.libp2p.getPeers(); - - // Attempt to ask all of our peers - for (const peer of peers) { - const response = await this.sendRequestToPeer(peer, subProtocol, payload); - console.log("In send request response", response); - - // If we get a response, return it, otherwise we iterate onto the next peer - // We do not consider it a success if we have an empty buffer - if (response && response.length > 0) { - return response; + const requestFunction = async () => { + // Get active peers + const peers = this.libp2p.getPeers(); + + // Attempt to ask all of our peers + for (const peer of peers) { + const response = await this.sendRequestToPeer(peer, subProtocol, payload); + + // If we get a response, return it, otherwise we iterate onto 
the next peer + // We do not consider it a success if we have an empty buffer + if (response && response.length > 0) { + return response; + } } + return undefined; } - return undefined; + + return await executeTimeoutWithCustomError( + requestFunction, + this.overallRequestTimeoutMs, + () => new CollectiveReqRespTimeoutError() + ); } /** @@ -109,14 +116,12 @@ export class ReqResp { const result = await executeTimeoutWithCustomError( (): Promise => pipe([payload], stream!, this.readMessage), - this.overallRequestTimeoutMs, + this.individualRequestTimeoutMs, () => new IndiviualReqRespTimeoutError(), ); - console.log("after timeout check", result); await stream.close(); this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); - console.log("after stream close", result); return result; } catch (e: any) { @@ -130,8 +135,8 @@ export class ReqResp { this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`); } } - return undefined; } + return undefined; } /** From 3c8e1b9ee837dc263d16f17da62914e5221af4dc Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 21:50:41 +0000 Subject: [PATCH 017/123] chore: include tests for specific error messages --- .../p2p/src/service/reqresp/reqresp.test.ts | 36 +++++++++++++++---- .../p2p/src/service/reqresp/reqresp.ts | 23 +++++++----- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index 348bfa07128..b54e25bb317 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -5,6 +5,7 @@ import { describe, expect, it, jest } from '@jest/globals'; import { MOCK_SUB_PROTOCOL_HANDLERS, connectToPeers, createNodes, startNodes, stopNodes } from '../../mocks/index.js'; import { PING_PROTOCOL, TX_REQ_PROTOCOL } from './interface.js'; +import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; // The Req Resp protocol should allow nodes to dial specific peers // and ask for specific data that they missed via the traditional gossip protocol. 
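Both hunks above lean on the same primitive: race the real work against a timer that rejects with a caller-supplied error, so the per-peer path can fail with IndiviualReqRespTimeoutError while the whole peer sweep fails with CollectiveReqRespTimeoutError. Below is a condensed sketch of that pattern; runWithTimeout is a stand-in for executeTimeoutWithCustomError from @aztec/foundation/timer, not its actual implementation.

// Stand-in for executeTimeoutWithCustomError(fn, timeoutMs, makeError): race the
// work against a timer that rejects with the supplied error factory.
async function runWithTimeout<T>(fn: () => Promise<T>, timeoutMs: number, makeError: () => Error): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const interrupt = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(makeError()), timeoutMs);
  });
  try {
    return await Promise.race([fn(), interrupt]);
  } finally {
    if (timer !== undefined) {
      clearTimeout(timer); // do not leave the timer pending once the work settles
    }
  }
}

// Hypothetical shape of the two layers (peer iteration elided):
//   per peer:   await runWithTimeout(() => askPeer(peer), individualRequestTimeoutMs, () => new IndiviualReqRespTimeoutError());
//   whole loop: await runWithTimeout(() => askAllPeers(), overallRequestTimeoutMs, () => new CollectiveReqRespTimeoutError());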
@@ -130,26 +131,49 @@ describe('ReqResp', () => { return new Promise(() => {}); }); + // Spy on the logger to make sure the error message is logged + const loggerSpy = jest.spyOn((nodes[0].req as any).logger, 'error'); + await sleep(500); await connectToPeers(nodes); await sleep(500); - const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, Buffer.from('ping')); + const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, Buffer.from('tx')); expect(res).toBeUndefined(); + // Make sure the error message is logged + const errorMessage = `${new IndiviualReqRespTimeoutError().message} | peerId: ${nodes[1].p2p.peerId.toString()} | subProtocol: ${TX_REQ_PROTOCOL}`; + expect(loggerSpy).toHaveBeenCalledWith(errorMessage); + await stopNodes(nodes); }); - // TODO: complete this test it("Should hit collective timeout if nothing is returned over the stream from multiple peers", async () => { - const nodes = await createNodes(3); + const nodes = await createNodes(4); await startNodes(nodes); - jest.spyOn((nodes[1].req as any).subProtocolHandlers, TX_REQ_PROTOCOL).mockImplementation(() => { - return new Promise(() => {}); - }); + for (let i = 1; i < nodes.length; i++) { + jest.spyOn((nodes[i].req as any).subProtocolHandlers, TX_REQ_PROTOCOL).mockImplementation(() => { + return new Promise(() => {}); + }); + } + + // Spy on the logger to make sure the error message is logged + const loggerSpy = jest.spyOn((nodes[0].req as any).logger, 'error'); + + await sleep(500); + await connectToPeers(nodes); + await sleep(500); + const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, Buffer.from('tx')); + expect(res).toBeUndefined(); + + // Make sure the error message is logged + const errorMessage = `${new CollectiveReqRespTimeoutError().message} | subProtocol: ${TX_REQ_PROTOCOL}`; + expect(loggerSpy).toHaveBeenCalledWith(errorMessage); + + await stopNodes(nodes); }); }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index a35a4f9a746..bc3146ab021 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -88,11 +88,16 @@ export class ReqResp { return undefined; } - return await executeTimeoutWithCustomError( - requestFunction, - this.overallRequestTimeoutMs, - () => new CollectiveReqRespTimeoutError() - ); + try { + return await executeTimeoutWithCustomError( + requestFunction, + this.overallRequestTimeoutMs, + () => new CollectiveReqRespTimeoutError() + ); + } catch (e: any) { + this.logger.error(`${e.message} | subProtocol: ${subProtocol}`); + return undefined; + } } /** @@ -112,7 +117,7 @@ export class ReqResp { try { stream = await this.libp2p.dialProtocol(peerId, subProtocol); - this.logger.debug(`Stream opened with ${peerId.publicKey} for ${subProtocol}`); + this.logger.debug(`Stream opened with ${peerId.toString()} for ${subProtocol}`); const result = await executeTimeoutWithCustomError( (): Promise => pipe([payload], stream!, this.readMessage), @@ -121,16 +126,16 @@ export class ReqResp { ); await stream.close(); - this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); + this.logger.debug(`Stream closed with ${peerId.toString()} for ${subProtocol}`); return result; } catch (e: any) { - this.logger.error(`${e.message} | peer: ${peerId.publicKey?.toString()} | subProtocol: ${subProtocol}`); + this.logger.error(`${e.message} | peerId: ${peerId.toString()} | subProtocol: ${subProtocol}`); } finally { if (stream) { try { await stream.close(); - 
this.logger.debug(`Stream closed with ${peerId.publicKey} for ${subProtocol}`); + this.logger.debug(`Stream closed with ${peerId.toString()} for ${subProtocol}`); } catch (closeError) { this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`); } From 9a738e5533cfe08192cb96943b917e2f38efa8e3 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 21:51:03 +0000 Subject: [PATCH 018/123] fmt --- yarn-project/p2p/src/errors/reqresp.error.ts | 2 +- .../p2p/src/service/reqresp/reqresp.test.ts | 11 +++++------ yarn-project/p2p/src/service/reqresp/reqresp.ts | 14 ++++++++------ 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/yarn-project/p2p/src/errors/reqresp.error.ts b/yarn-project/p2p/src/errors/reqresp.error.ts index f70721efc5e..a31973d3e67 100644 --- a/yarn-project/p2p/src/errors/reqresp.error.ts +++ b/yarn-project/p2p/src/errors/reqresp.error.ts @@ -18,4 +18,4 @@ export class CollectiveReqRespTimeoutError extends Error { constructor() { super(`Request to all peers timed out`); } -} \ No newline at end of file +} diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index b54e25bb317..ecd9a6824e9 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -3,9 +3,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { describe, expect, it, jest } from '@jest/globals'; +import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; import { MOCK_SUB_PROTOCOL_HANDLERS, connectToPeers, createNodes, startNodes, stopNodes } from '../../mocks/index.js'; import { PING_PROTOCOL, TX_REQ_PROTOCOL } from './interface.js'; -import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; // The Req Resp protocol should allow nodes to dial specific peers // and ask for specific data that they missed via the traditional gossip protocol. 
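The per-peer path that these hunks keep refining boils down to: dial the sub-protocol, pipe the payload through the stream under the individual timeout, and close the stream in finally no matter how the attempt ended. The sketch below shows that flow under stated assumptions: the inline Promise.race stands in for executeTimeoutWithCustomError, readMessage is whatever sink decodes the response, and timer cleanup is elided for brevity; this is not the exact ReqResp implementation.

import type { PeerId, Stream } from '@libp2p/interface';
import { pipe } from 'it-pipe';
import type { Libp2p } from 'libp2p';

// Sketch of the dial -> pipe-with-timeout -> close-in-finally flow.
async function requestFromPeer(
  node: Libp2p,
  peerId: PeerId,
  subProtocol: string,
  payload: Buffer,
  readMessage: (source: AsyncIterable<unknown>) => Promise<Buffer>,
  timeoutMs: number,
  makeError: () => Error,
): Promise<Buffer | undefined> {
  let stream: Stream | undefined;
  try {
    stream = await node.dialProtocol(peerId, subProtocol);
    // Race the round-trip against the per-peer timeout (timer cleanup elided in this sketch).
    const interrupt = new Promise<never>((_, reject) => setTimeout(() => reject(makeError()), timeoutMs));
    return await Promise.race([pipe([payload], stream, readMessage), interrupt]);
  } catch {
    // Swallow and report upstream: the caller simply moves on to the next peer.
    return undefined;
  } finally {
    // Always close, whether we succeeded, errored, or timed out.
    await stream?.close().catch(() => {});
  }
}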
@@ -142,13 +142,15 @@ describe('ReqResp', () => { expect(res).toBeUndefined(); // Make sure the error message is logged - const errorMessage = `${new IndiviualReqRespTimeoutError().message} | peerId: ${nodes[1].p2p.peerId.toString()} | subProtocol: ${TX_REQ_PROTOCOL}`; + const errorMessage = `${ + new IndiviualReqRespTimeoutError().message + } | peerId: ${nodes[1].p2p.peerId.toString()} | subProtocol: ${TX_REQ_PROTOCOL}`; expect(loggerSpy).toHaveBeenCalledWith(errorMessage); await stopNodes(nodes); }); - it("Should hit collective timeout if nothing is returned over the stream from multiple peers", async () => { + it('Should hit collective timeout if nothing is returned over the stream from multiple peers', async () => { const nodes = await createNodes(4); await startNodes(nodes); @@ -175,8 +177,5 @@ describe('ReqResp', () => { await stopNodes(nodes); }); - }); - - }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index bc3146ab021..29a63de08ea 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -1,18 +1,18 @@ // @attribution: lodestar impl for inspiration import { type Logger, createDebugLogger } from '@aztec/foundation/log'; +import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; -import { type Stream, type IncomingStreamData, type PeerId } from '@libp2p/interface'; +import { type IncomingStreamData, type PeerId, type Stream } from '@libp2p/interface'; import { pipe } from 'it-pipe'; import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; +import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; import { DEFAULT_SUB_PROTOCOL_HANDLERS, type ReqRespSubProtocol, type ReqRespSubProtocolHandlers, } from './interface.js'; -import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; -import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; /** * The Request Response Service @@ -86,13 +86,13 @@ export class ReqResp { } } return undefined; - } + }; try { return await executeTimeoutWithCustomError( requestFunction, this.overallRequestTimeoutMs, - () => new CollectiveReqRespTimeoutError() + () => new CollectiveReqRespTimeoutError(), ); } catch (e: any) { this.logger.error(`${e.message} | subProtocol: ${subProtocol}`); @@ -137,7 +137,9 @@ export class ReqResp { await stream.close(); this.logger.debug(`Stream closed with ${peerId.toString()} for ${subProtocol}`); } catch (closeError) { - this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`); + this.logger.error( + `Error closing stream: ${closeError instanceof Error ? 
closeError.message : 'Unknown error'}`, + ); } } } From 045af5a07dc5259cfa5386806fe1b8bc0ad9f28c Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 22:11:04 +0000 Subject: [PATCH 019/123] clean --- l1-contracts/test/sparta/Sparta.t.sol | 1 - .../src/p2p/block_attestation.ts | 9 ++++---- .../circuit-types/src/p2p/block_proposal.ts | 2 -- yarn-project/p2p/src/client/p2p_client.ts | 19 +++++++++++++++- yarn-project/p2p/src/errors/index.ts | 1 - yarn-project/p2p/src/errors/reqresp.error.ts | 14 ------------ .../src/publisher/l1-publisher.ts | 22 ++++--------------- .../sequencer-client/src/publisher/utils.ts | 17 ++++++++++++++ .../src/sequencer/sequencer.ts | 13 ++--------- .../src/duties/validation_service.ts | 9 ++++---- .../validator-client/src/validator.test.ts | 1 - 11 files changed, 51 insertions(+), 57 deletions(-) delete mode 100644 yarn-project/p2p/src/errors/index.ts delete mode 100644 yarn-project/p2p/src/errors/reqresp.error.ts create mode 100644 yarn-project/sequencer-client/src/publisher/utils.ts diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index a6f802dc102..72cc4dc82b7 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -184,7 +184,6 @@ contract SpartaTest is DecoderBase { rollup.setupEpoch(); - // TODO: include these in the base block and include in the load function bytes32[] memory txHashes = new bytes32[](0); if (_signatureCount > 0 && ree.proposer != address(0)) { diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 47ff33f90a7..e78fa7237c2 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -55,10 +55,7 @@ export class BlockAttestation extends Gossipable { async getSender() { if (!this.sender) { // Recover the sender from the attestation - const payload = serializeToBuffer([this.archive, this.txHashes]); - // TODO(md) - temporarily hashing again to have less changes in the rollup - const hashed = keccak256(payload); - // const payload0x: `0x${string}` = `0x${payload.toString('hex')}`; + const hashed = keccak256(this.getPayload()); const address = await recoverMessageAddress({ message: { raw: hashed }, signature: this.signature.to0xString(), @@ -70,6 +67,10 @@ export class BlockAttestation extends Gossipable { return this.sender; } + getPayload(): Buffer { + return serializeToBuffer([this.archive, this.txHashes]); + } + toBuffer(): Buffer { return serializeToBuffer([this.header, this.archive, this.txHashes.length, this.txHashes, this.signature]); } diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 1d74c70212c..e9d06208a01 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -57,8 +57,6 @@ export class BlockProposal extends Gossipable { // performance note(): this signature method requires another hash behind the scenes const hashed = keccak256(this.getPayload()); const address = await recoverMessageAddress({ - // TODO(md): fix this up - // message: { raw: this.p2pMessageIdentifier().to0xString() }, message: { raw: hashed }, signature: this.signature.to0xString(), }); diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 4b079c9d5b3..7534c320164 100644 --- 
a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -291,12 +291,29 @@ export class P2PClient implements P2P { this.p2pService.registerBlockReceivedCallback(handler); } - // TODO: this will need a timeout + retry mechanism to make sure that we can retreive things on time + /** + * Requests the transactions with the given hashes from the network. + * + * If a transaction can be retrieved, it will be returned, if not an undefined + * will be returned. In place. + * + * @param txHashes - The hashes of the transactions to request. + * @returns A promise that resolves to an array of transactions or undefined. + */ public requestTxs(txHashes: TxHash[]): Promise<(Tx | undefined)[]> { const requestPromises = txHashes.map(txHash => this.requestTxByHash(txHash)); return Promise.all(requestPromises); } + /** + * Uses the Request Response protocol to request a transaction from the network. + * + * If the underlying request response protocol fails, then we return undefined. + * If it succeeds then we add the transaction to our transaction pool and return. + * + * @param txHash - The hash of the transaction to request. + * @returns A promise that resolves to a transaction or undefined. + */ public async requestTxByHash(txHash: TxHash): Promise { const tx = await this.p2pService.sendRequest(TX_REQ_PROTOCOL, txHash); diff --git a/yarn-project/p2p/src/errors/index.ts b/yarn-project/p2p/src/errors/index.ts deleted file mode 100644 index 01b77d2a6a2..00000000000 --- a/yarn-project/p2p/src/errors/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './reqresp.error.js'; diff --git a/yarn-project/p2p/src/errors/reqresp.error.ts b/yarn-project/p2p/src/errors/reqresp.error.ts deleted file mode 100644 index 1349205cd74..00000000000 --- a/yarn-project/p2p/src/errors/reqresp.error.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { type ReqRespSubProtocol, TX_REQ_PROTOCOL } from '../service/reqresp/interface.js'; - -export class ReqRespError extends Error { - constructor(protocol: ReqRespSubProtocol, message: string) { - super(message); - } -} - -// TODO(md): think about what these errors should ideally be -export class TxHandlerReqRespError extends ReqRespError { - constructor() { - super(TX_REQ_PROTOCOL, 'Could not perform tx handler request response'); - } -} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d1a59a138af..02320b0f782 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -33,6 +33,7 @@ import type * as chains from 'viem/chains'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1PublisherMetrics } from './l1-publisher-metrics.js'; +import { prettyLogVeimError } from './utils.js'; /** * Stats for a sent transaction. @@ -248,7 +249,6 @@ export class L1Publisher { * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ - // TODO: rename propose public async processL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { const ctx = { blockNumber: block.number, @@ -256,8 +256,7 @@ export class L1Publisher { blockHash: block.hash().toString(), }; - // TODO: move this to take in the proposal to get the digest - rather than manually - const tempDigest = keccak256(serializeToBuffer(block.archive.root, txHashes ?? 
[])); + const digest = keccak256(serializeToBuffer(block.archive.root, txHashes ?? [])); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), @@ -279,8 +278,7 @@ export class L1Publisher { // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. await this.validateBlockForSubmission(block.header, { - // digest: block.archive.root.toBuffer(), // THIS IS NOT THE DIGEST ANYMORE!!!!! - digest: tempDigest, + digest, signatures: attestations ?? [], }); @@ -534,19 +532,7 @@ export class L1Publisher { }); } } catch (err) { - if (err instanceof BaseError) { - const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); - if (revertError instanceof ContractFunctionRevertedError) { - // TODO: turn this into a function - const errorName = revertError.data?.errorName ?? ''; - const args = - revertError.metaMessages && revertError.metaMessages?.length > 1 - ? revertError.metaMessages[1].trimStart() - : ''; - this.log.error(`propose failed with "${errorName}${args}"`); - return undefined; - } - } + prettyLogVeimError(err, this.log); this.log.error(`Rollup publish failed`, err); return undefined; } diff --git a/yarn-project/sequencer-client/src/publisher/utils.ts b/yarn-project/sequencer-client/src/publisher/utils.ts new file mode 100644 index 00000000000..7e23aa0c878 --- /dev/null +++ b/yarn-project/sequencer-client/src/publisher/utils.ts @@ -0,0 +1,17 @@ +import { Logger } from "@aztec/foundation/log"; +import { BaseError, ContractFunctionRevertedError } from "viem"; + + +export function prettyLogVeimError(err: any, logger: Logger) { + if (err instanceof BaseError) { + const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); + if (revertError instanceof ContractFunctionRevertedError) { + const errorName = revertError.data?.errorName ?? ''; + const args = + revertError.metaMessages && revertError.metaMessages?.length > 1 + ? revertError.metaMessages[1].trimStart() + : ''; + logger.debug(`canProposeAtTime failed with "${errorName}${args}"`); + } + } +} \ No newline at end of file diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index f57720ea370..bcf8e0f6d78 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -38,6 +38,7 @@ import { type L1Publisher } from '../publisher/l1-publisher.js'; import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { type SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; +import { prettyLogVeimError } from '../publisher/utils.js'; export type ShouldProposeArgs = { pendingTxsCount?: number; @@ -311,17 +312,7 @@ export class Sequencer { this.log.debug(`Can propose block ${proposalBlockNumber} at slot ${slot}`); return slot; } catch (err) { - if (err instanceof BaseError) { - const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); - if (revertError instanceof ContractFunctionRevertedError) { - const errorName = revertError.data?.errorName ?? ''; - const args = - revertError.metaMessages && revertError.metaMessages?.length > 1 - ? 
revertError.metaMessages[1].trimStart() - : ''; - this.log.debug(`canProposeAtTime failed with "${errorName}${args}"`); - } - } + prettyLogVeimError(err, this.log); throw err; } } diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 6da7e816b76..fe2a3727940 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -19,9 +19,9 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) */ async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { - // Note: just signing the archive for now + + // NOTE: just signing the archive and txs for now const payload = serializeToBuffer([archive, txs]); - // TODO(temp hash it together before signing) const hashed = keccak256(payload); const sig = await this.keyStore.sign(hashed); @@ -31,14 +31,15 @@ export class ValidationService { /** * Attest to the given block proposal constructed by the current sequencer * + * NOTE: This is just a blind signing. + * We assume that the proposal is valid and DA guarantees have been checked previously. + * * @param proposal - The proposal to attest to * @returns attestation */ async attestToProposal(proposal: BlockProposal): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct - // TODO: in the function before this, check that all of the txns exist in the payload - const buf = keccak256(proposal.getPayload()); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index ec238e14077..4802656e5ce 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -49,7 +49,6 @@ describe('ValidationService', () => { expect(await blockProposal.getSender()).toEqual(validatorAddress); }); - // TODO: add a test on the sequencer that it can recover in case that this timeout is hit it('Should a timeout if we do not collect enough attestations in time', async () => { const proposal = await makeBlockProposal(); From 73d26ec97a93c3f58e5b4cefc854b3a203f4c718 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 6 Sep 2024 22:29:41 +0000 Subject: [PATCH 020/123] =?UTF-8?q?=F0=9F=A7=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- l1-contracts/src/core/Rollup.sol | 1 - l1-contracts/src/core/interfaces/IRollup.sol | 2 -- .../src/publisher/l1-publisher.ts | 3 +-- .../sequencer-client/src/publisher/utils.ts | 26 +++++++++---------- .../src/sequencer/sequencer.ts | 4 +-- .../src/duties/validation_service.ts | 1 - 6 files changed, 14 insertions(+), 23 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index fad78591128..e8ebb009920 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -182,7 +182,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ - // Temp change name to fix viem issue function proposeWithBody( bytes calldata _header, bytes32 _archive, diff --git 
a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index d6f60be1a47..23c3012372b 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -34,8 +34,6 @@ interface IRollup { function OUTBOX() external view returns (IOutbox); - // Temp: turns out there is a viem bug where it cannot differentiate between the two - // different types function proposeWithBody( bytes calldata _header, bytes32 _archive, diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 02320b0f782..4c1937a6b79 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -13,7 +13,6 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import pick from 'lodash.pick'; import { - BaseError, ContractFunctionRevertedError, type GetContractReturnType, type Hex, @@ -503,7 +502,7 @@ export class L1Publisher { // We almost always want to skip simulation here if we are not already within the slot, else we will be one slot ahead // See comment attached to static SKIP_SIMULATION definition if (!L1Publisher.SKIP_SIMULATION) { - const simulationResult = await this.rollupContract.simulate.proposeWithBody(args, { + await this.rollupContract.simulate.proposeWithBody(args, { account: this.account, }); } diff --git a/yarn-project/sequencer-client/src/publisher/utils.ts b/yarn-project/sequencer-client/src/publisher/utils.ts index 7e23aa0c878..13842102a2c 100644 --- a/yarn-project/sequencer-client/src/publisher/utils.ts +++ b/yarn-project/sequencer-client/src/publisher/utils.ts @@ -1,17 +1,15 @@ -import { Logger } from "@aztec/foundation/log"; -import { BaseError, ContractFunctionRevertedError } from "viem"; +import { type Logger } from '@aztec/foundation/log'; +import { BaseError, ContractFunctionRevertedError } from 'viem'; export function prettyLogVeimError(err: any, logger: Logger) { - if (err instanceof BaseError) { - const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); - if (revertError instanceof ContractFunctionRevertedError) { - const errorName = revertError.data?.errorName ?? ''; - const args = - revertError.metaMessages && revertError.metaMessages?.length > 1 - ? revertError.metaMessages[1].trimStart() - : ''; - logger.debug(`canProposeAtTime failed with "${errorName}${args}"`); - } - } -} \ No newline at end of file + if (err instanceof BaseError) { + const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); + if (revertError instanceof ContractFunctionRevertedError) { + const errorName = revertError.data?.errorName ?? ''; + const args = + revertError.metaMessages && revertError.metaMessages?.length > 1 ? 
revertError.metaMessages[1].trimStart() : ''; + logger.debug(`canProposeAtTime failed with "${errorName}${args}"`); + } + } +} diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index bcf8e0f6d78..611994cdaec 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -30,15 +30,13 @@ import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec import { type ValidatorClient } from '@aztec/validator-client'; import { type WorldStateStatus, type WorldStateSynchronizer } from '@aztec/world-state'; -import { BaseError, ContractFunctionRevertedError } from 'viem'; - import { type BlockBuilderFactory } from '../block_builder/index.js'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; +import { prettyLogVeimError } from '../publisher/utils.js'; import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { type SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; -import { prettyLogVeimError } from '../publisher/utils.js'; export type ShouldProposeArgs = { pendingTxsCount?: number; diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index fe2a3727940..1115d72208f 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -19,7 +19,6 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) 
*/ async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { - // NOTE: just signing the archive and txs for now const payload = serializeToBuffer([archive, txs]); const hashed = keccak256(payload); From d358228317434e03594f3fcdcfe7182c837732e1 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:28:36 +0000 Subject: [PATCH 021/123] chore: fix sequencing tests --- .../src/sequencer/sequencer.test.ts | 44 +++++++++++++------ 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 73b441a58eb..39fda5b0ae2 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -180,6 +180,7 @@ describe('sequencer', () => { it('builds a block out of a single tx', async () => { const tx = mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; + const txHash = tx.getTxHash(); const result: ProvingSuccess = { status: PROVING_STATUS.SUCCESS, @@ -205,13 +206,14 @@ describe('sequencer', () => { ); // Ok, we have an issue that we never actually call the process L2 block expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block when it is their turn', async () => { const tx = mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; + const txHash = tx.getTxHash(); const result: ProvingSuccess = { status: PROVING_STATUS.SUCCESS, }; @@ -253,16 +255,19 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs rejecting double spends', async () => { + const doubleSpendTxIndex = 1; const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); - const doubleSpendTx = txs[1]; + const validTxHashes = txs.filter((_, i) => i !== doubleSpendTxIndex).map(tx => tx.getTxHash()); + + const doubleSpendTx = txs[doubleSpendTxIndex]; const result: ProvingSuccess = { status: PROVING_STATUS.SUCCESS, }; @@ -293,17 +298,20 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs rejecting incorrect chain ids', async () => { + const invalidChainTxIndex = 1; const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); - const invalidChainTx = txs[1]; + const invalidChainTx = txs[invalidChainTxIndex]; + const validTxHashes = 
txs.filter((_, i) => i !== invalidChainTxIndex).map(tx => tx.getTxHash()); + const result: ProvingSuccess = { status: PROVING_STATUS.SUCCESS, }; @@ -329,16 +337,20 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs dropping the ones that go over max size', async () => { + const invalidTransactionIndex = 1; + const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; txs.forEach(tx => { tx.data.constants.txContext.chainId = chainId; }); + const validTxHashes = txs.filter((_, i) => i !== invalidTransactionIndex).map(tx => tx.getTxHash()); + const result: ProvingSuccess = { status: PROVING_STATUS.SUCCESS, }; @@ -354,8 +366,9 @@ describe('sequencer', () => { globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); // We make txs[1] too big to fit - (txs[1] as Writeable).unencryptedLogs = UnencryptedTxL2Logs.random(2, 4); - (txs[1].unencryptedLogs.functionLogs[0].logs[0] as Writeable).data = randomBytes(1024 * 1022); + (txs[invalidTransactionIndex] as Writeable).unencryptedLogs = UnencryptedTxL2Logs.random(2, 4); + (txs[invalidTransactionIndex].unencryptedLogs.functionLogs[0].logs[0] as Writeable).data = + randomBytes(1024 * 1022); await sequencer.initialSync(); await sequencer.work(); @@ -365,7 +378,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -401,11 +414,14 @@ describe('sequencer', () => { // block is not built with 3 txs p2p.getTxs.mockReturnValueOnce(txs.slice(0, 3)); + await sequencer.work(); expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0); // block is built with 4 txs p2p.getTxs.mockReturnValueOnce(txs.slice(0, 4)); + const txHashes = txs.slice(0, 4).map(tx => tx.getTxHash()); + await sequencer.work(); expect(blockSimulator.startNewBlock).toHaveBeenCalledWith( 4, @@ -413,7 +429,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -464,7 +480,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), []); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -506,7 +522,9 @@ describe('sequencer', () => { sequencer.flush(); // block is built with 3 txs - p2p.getTxs.mockReturnValueOnce(txs.slice(0, 3)); + const postFlushTxs = txs.slice(0, 3); + 
p2p.getTxs.mockReturnValueOnce(postFlushTxs); + const postFlushTxHashes = postFlushTxs.map(tx => tx.getTxHash()); await sequencer.work(); expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(1); expect(blockSimulator.startNewBlock).toHaveBeenCalledWith( @@ -516,7 +534,7 @@ describe('sequencer', () => { ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures(), postFlushTxHashes); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); From f673593eb9fbd4f900ab41a79fa8f146309a47f7 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:34:14 +0000 Subject: [PATCH 022/123] fmt --- yarn-project/sequencer-client/src/sequencer/sequencer.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 4f8444abb6d..f7bdf8c1323 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -367,7 +367,8 @@ describe('sequencer', () => { // We make txs[1] too big to fit (txs[invalidTransactionIndex] as Writeable).unencryptedLogs = UnencryptedTxL2Logs.random(2, 4); - (txs[invalidTransactionIndex].unencryptedLogs.functionLogs[0].logs[0] as Writeable).data = randomBytes(1024 * 1022); + (txs[invalidTransactionIndex].unencryptedLogs.functionLogs[0].logs[0] as Writeable).data = + randomBytes(1024 * 1022); await sequencer.initialSync(); await sequencer.work(); From a15ab17e5de5fa959b78d1f9e5272b47f70a0385 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:35:34 +0000 Subject: [PATCH 023/123] fmt --- yarn-project/foundation/src/timer/timeout.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/yarn-project/foundation/src/timer/timeout.ts b/yarn-project/foundation/src/timer/timeout.ts index 8f7ee422a7d..f599543bc5e 100644 --- a/yarn-project/foundation/src/timer/timeout.ts +++ b/yarn-project/foundation/src/timer/timeout.ts @@ -10,7 +10,12 @@ export class TimeoutTask { private interrupt = () => {}; private totalTime = 0; - constructor(private fn: () => Promise, private timeout = 0, fnName = '', error = () => new Error(`Timeout${fnName ? ` running ${fnName}` : ''} after ${timeout}ms.`)) { + constructor( + private fn: () => Promise, + private timeout = 0, + fnName = '', + error = () => new Error(`Timeout${fnName ? 
` running ${fnName}` : ''} after ${timeout}ms.`), + ) { this.interruptPromise = new Promise((_, reject) => { this.interrupt = () => reject(error()); }); @@ -63,7 +68,12 @@ export const executeTimeout = async (fn: () => Promise, timeout = 0, fnNam return await task.exec(); }; -export const executeTimeoutWithCustomError = async (fn: () => Promise, timeout = 0, error = () => new Error("No custom error provided"), fnName = '') => { +export const executeTimeoutWithCustomError = async ( + fn: () => Promise, + timeout = 0, + error = () => new Error('No custom error provided'), + fnName = '', +) => { const task = new TimeoutTask(fn, timeout, fnName, error); return await task.exec(); }; From 2e3f80bb483612e5e569c4d638e95d885e4a43c9 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:38:15 +0000 Subject: [PATCH 024/123] fmt solidity --- l1-contracts/src/core/Rollup.sol | 1 - l1-contracts/test/sparta/Sparta.t.sol | 2 -- 2 files changed, 3 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index e8ebb009920..b0b1aef496c 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -443,7 +443,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { HeaderLib.Header memory header = HeaderLib.decode(_header); setupEpoch(); - // TODO: make function for this bytes32 digest = keccak256(abi.encodePacked(_archive, _txHashes)); _validateHeader({ _header: header, diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 72cc4dc82b7..0528e475c03 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -166,8 +166,6 @@ contract SpartaTest is DecoderBase { DecoderBase.Full memory full = load(_name); bytes memory header = full.block.header; bytes32 archive = full.block.archive; - // TODO(md): get the tx hashes from the block - but we do not have them now - // Come back to this bytes memory body = full.block.body; StructToAvoidDeepStacks memory ree; From 1bde1fe6f9a0f9c7eef37dd8c17eb69858ef029d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sun, 8 Sep 2024 08:51:29 +0000 Subject: [PATCH 025/123] fix: test hash --- yarn-project/circuit-types/src/p2p/mocks.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index f4646e20919..0c57accf546 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -3,6 +3,7 @@ import { Fr } from '@aztec/foundation/fields'; import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type PrivateKeyAccount } from 'viem'; +import { keccak256 } from 'viem'; import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; import { TxHash } from '../tx/tx_hash.js'; @@ -16,7 +17,8 @@ export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise TxHash.random()); - const signature = Signature.from0xString(await signer.signMessage({ message: { raw: archive.toString() } })); + const hash = keccak256(serializeToBuffer([archive, txs])); + const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); return new BlockProposal(blockHeader, archive, txs, signature); }; @@ -28,7 +30,7 @@ export const makeBlockAttestation = async (signer?: PrivateKeyAccount): Promise< const blockHeader = makeHeader(1); const archive = 
Fr.random(); const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - const hash: `0x${string}` = `0x${serializeToBuffer([archive, txs]).toString('hex')}`; + const hash = keccak256(serializeToBuffer([archive, txs])); const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); return new BlockAttestation(blockHeader, archive, txs, signature); From 7a50a2b9db37c43cb87d7da60c79dcdab5953e2d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sun, 8 Sep 2024 10:16:15 +0000 Subject: [PATCH 026/123] exp: adjust test nodes --- yarn-project/end-to-end/src/e2e_p2p_network.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index c46adb23f26..6caf8f6bec4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -199,8 +199,8 @@ describe('e2e_p2p_network', () => { // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it // Original implementation of `processTxFromPeer` will store received transactions in the tx pool. - // We have chosen nodes 0,2 as they do not get chosen to be the sequencer in this test ( node 1 does ). - const nodeToTurnOffTxGossip = [0, 2]; + // We have chosen nodes 0,3 as they do not get chosen to be the sequencer in this test. + const nodeToTurnOffTxGossip = [0, 3]; for (const nodeIndex of nodeToTurnOffTxGossip) { jest .spyOn((nodes[nodeIndex] as any).p2pClient.p2pService, 'processTxFromPeer') From 998f38c690e2763337d975db69edda79078fcee6 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Sun, 8 Sep 2024 17:01:02 +0000 Subject: [PATCH 027/123] chore: add reqresp configuration values to p2p config --- yarn-project/foundation/src/config/env_var.ts | 2 ++ yarn-project/p2p/src/config.ts | 5 ++- yarn-project/p2p/src/mocks/index.ts | 7 +++- .../p2p/src/service/discv5_service.test.ts | 2 ++ .../p2p/src/service/libp2p_service.ts | 2 +- .../p2p/src/service/reqresp/config.ts | 35 +++++++++++++++++++ .../reqresp/p2p_client.integration.test.ts | 2 ++ .../p2p/src/service/reqresp/reqresp.ts | 11 +++--- 8 files changed, 59 insertions(+), 7 deletions(-) create mode 100644 yarn-project/p2p/src/service/reqresp/config.ts diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 770aa8dcd95..9f854364a31 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -37,6 +37,8 @@ export type EnvVar = | 'TX_GOSSIP_VERSION' | 'P2P_QUERY_FOR_IP' | 'P2P_TX_POOL_KEEP_PROVEN_FOR' + | 'P2P_REQRESP_OVERALL_REQUEST_TIMEOUT_MS' + | 'P2P_REQRESP_INDIVIDUAL_REQUEST_TIMEOUT_MS' | 'TELEMETRY' | 'OTEL_SERVICE_NAME' | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 26a81503502..004950685f7 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -6,10 +6,12 @@ import { pickConfigMappings, } from '@aztec/foundation/config'; +import { type P2PReqRespConfig, p2pReqRespConfigMappings } from './service/reqresp/config.js'; + /** * P2P client configuration values. */ -export interface P2PConfig { +export interface P2PConfig extends P2PReqRespConfig { /** * A flag dictating whether the P2P subsystem should be enabled. 
*/ @@ -170,6 +172,7 @@ export const p2pConfigMappings: ConfigMappingsType = { 'How many blocks have to pass after a block is proven before its txs are deleted (zero to delete immediately once proven)', ...numberConfigHelper(0), }, + ...p2pReqRespConfigMappings, }; /** diff --git a/yarn-project/p2p/src/mocks/index.ts b/yarn-project/p2p/src/mocks/index.ts index 18054a39165..c9d0679e3f8 100644 --- a/yarn-project/p2p/src/mocks/index.ts +++ b/yarn-project/p2p/src/mocks/index.ts @@ -4,6 +4,7 @@ import { bootstrap } from '@libp2p/bootstrap'; import { tcp } from '@libp2p/tcp'; import { type Libp2p, type Libp2pOptions, createLibp2p } from 'libp2p'; +import { type P2PReqRespConfig } from '../service/reqresp/config.js'; import { pingHandler, statusHandler } from '../service/reqresp/handlers.js'; import { PING_PROTOCOL, @@ -80,7 +81,11 @@ export const stopNodes = async (nodes: ReqRespNode[]): Promise => { // Create a req resp node, exposing the underlying p2p node export const createReqResp = async (): Promise => { const p2p = await createLibp2pNode(); - const req = new ReqResp(p2p); + const config: P2PReqRespConfig = { + overallRequestTimeoutMs: 4000, + individualRequestTimeoutMs: 2000, + }; + const req = new ReqResp(config, p2p); return { p2p, req, diff --git a/yarn-project/p2p/src/service/discv5_service.test.ts b/yarn-project/p2p/src/service/discv5_service.test.ts index 20e0f2af878..d9487d14bda 100644 --- a/yarn-project/p2p/src/service/discv5_service.test.ts +++ b/yarn-project/p2p/src/service/discv5_service.test.ts @@ -7,6 +7,7 @@ import { BootstrapNode } from '../bootstrap/bootstrap.js'; import { type P2PConfig } from '../config.js'; import { DiscV5Service } from './discV5_service.js'; import { createLibP2PPeerId } from './libp2p_service.js'; +import { DEFAULT_P2P_REQRESP_CONFIG } from './reqresp/config.js'; import { PeerDiscoveryState } from './service.js'; const waitForPeers = (node: DiscV5Service, expectedCount: number): Promise => { @@ -135,6 +136,7 @@ describe('Discv5Service', () => { p2pEnabled: true, l2QueueSize: 100, keepProvenTxsInPoolFor: 0, + ...DEFAULT_P2P_REQRESP_CONFIG, }; return new DiscV5Service(peerId, config); }; diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index caf57a821ea..446c5191734 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -94,7 +94,7 @@ export class LibP2PService implements P2PService { private logger = createDebugLogger('aztec:libp2p_service'), ) { this.peerManager = new PeerManager(node, peerDiscoveryService, config, logger); - this.reqresp = new ReqResp(node); + this.reqresp = new ReqResp(config, node); this.blockReceivedCallback = (block: BlockProposal): Promise => { this.logger.verbose( diff --git a/yarn-project/p2p/src/service/reqresp/config.ts b/yarn-project/p2p/src/service/reqresp/config.ts new file mode 100644 index 00000000000..5ff298939c6 --- /dev/null +++ b/yarn-project/p2p/src/service/reqresp/config.ts @@ -0,0 +1,35 @@ +import { type ConfigMapping, numberConfigHelper } from '@aztec/foundation/config'; + +export const DEFAULT_INDIVIDUAL_REQUEST_TIMEOUT_MS = 2000; +export const DEFAULT_OVERALL_REQUEST_TIMEOUT_MS = 4000; + +// For use in tests. 
+export const DEFAULT_P2P_REQRESP_CONFIG: P2PReqRespConfig = { + overallRequestTimeoutMs: DEFAULT_OVERALL_REQUEST_TIMEOUT_MS, + individualRequestTimeoutMs: DEFAULT_INDIVIDUAL_REQUEST_TIMEOUT_MS, +}; + +export interface P2PReqRespConfig { + /** + * The overall timeout for a request response operation. + */ + overallRequestTimeoutMs: number; + + /** + * The timeout for an individual request response peer interaction. + */ + individualRequestTimeoutMs: number; +} + +export const p2pReqRespConfigMappings: Record = { + overallRequestTimeoutMs: { + env: 'P2P_REQRESP_OVERALL_REQUEST_TIMEOUT_MS', + description: 'The overall timeout for a request response operation.', + ...numberConfigHelper(DEFAULT_OVERALL_REQUEST_TIMEOUT_MS), + }, + individualRequestTimeoutMs: { + env: 'P2P_REQRESP_INDIVIDUAL_REQUEST_TIMEOUT_MS', + description: 'The timeout for an individual request response peer interaction.', + ...numberConfigHelper(DEFAULT_INDIVIDUAL_REQUEST_TIMEOUT_MS), + }, +}; diff --git a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts index 8ec358ee2f4..31b7f50550f 100644 --- a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts @@ -17,6 +17,7 @@ import { type P2PClient } from '../../client/p2p_client.js'; import { type BootnodeConfig, type P2PConfig } from '../../config.js'; import { type TxPool } from '../../tx_pool/index.js'; import { createLibP2PPeerId } from '../index.js'; +import { DEFAULT_P2P_REQRESP_CONFIG } from './config.js'; /** * Mockify helper for testing purposes. @@ -92,6 +93,7 @@ describe('Req Resp p2p client integration', () => { queryForIp: false, dataDirectory: undefined, l1Contracts: { rollupAddress: EthAddress.ZERO }, + ...DEFAULT_P2P_REQRESP_CONFIG, }; txPool = { diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 29a63de08ea..8c6e19e5d40 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -8,6 +8,7 @@ import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; +import { type P2PReqRespConfig } from './config.js'; import { DEFAULT_SUB_PROTOCOL_HANDLERS, type ReqRespSubProtocol, @@ -30,14 +31,16 @@ export class ReqResp { private abortController: AbortController = new AbortController(); - // TODO: change defaults and add to configuration - private overallRequestTimeoutMs: number = 4000; - private individualRequestTimeoutMs: number = 2000; + private overallRequestTimeoutMs: number; + private individualRequestTimeoutMs: number; private subProtocolHandlers: ReqRespSubProtocolHandlers = DEFAULT_SUB_PROTOCOL_HANDLERS; - constructor(protected readonly libp2p: Libp2p) { + constructor(config: P2PReqRespConfig, protected readonly libp2p: Libp2p) { this.logger = createDebugLogger('aztec:p2p:reqresp'); + + this.overallRequestTimeoutMs = config.overallRequestTimeoutMs; + this.individualRequestTimeoutMs = config.individualRequestTimeoutMs; } /** From 1c2b151c407eeb7cf12b45c58a775a81bb5cf6dd Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 08:56:55 +0000 Subject: [PATCH 028/123] fix: use abi.encode vs encodePacked --- l1-contracts/src/core/Rollup.sol | 2 +- l1-contracts/test/sparta/Sparta.t.sol | 2 
+- .../src/p2p/block_attestation.ts | 7 ++-- .../circuit-types/src/p2p/block_proposal.ts | 19 +++++++++-- .../circuit-types/src/p2p/block_utils.ts | 32 +++++++++++++++++++ yarn-project/circuit-types/src/p2p/index.ts | 1 + yarn-project/circuit-types/src/p2p/mocks.ts | 7 ++-- .../foundation/src/buffer/buffer32.ts | 9 ++++++ .../src/publisher/l1-publisher.ts | 4 +-- .../src/duties/validation_service.ts | 9 ++---- 10 files changed, 72 insertions(+), 20 deletions(-) create mode 100644 yarn-project/circuit-types/src/p2p/block_utils.ts diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index b0b1aef496c..ca9aed08f2d 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -443,7 +443,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { HeaderLib.Header memory header = HeaderLib.decode(_header); setupEpoch(); - bytes32 digest = keccak256(abi.encodePacked(_archive, _txHashes)); + bytes32 digest = keccak256(abi.encode(_archive, _txHashes)); _validateHeader({ _header: header, _signatures: _signatures, diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 0528e475c03..b0f18cf37b7 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -190,7 +190,7 @@ contract SpartaTest is DecoderBase { SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](_signatureCount); - bytes32 digest = keccak256(abi.encodePacked(archive, txHashes)); + bytes32 digest = keccak256(abi.encode(archive, txHashes)); for (uint256 i = 0; i < _signatureCount; i++) { signatures[i] = createSignature(validators[i], digest); } diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index e78fa7237c2..e38effdb368 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -3,9 +3,10 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { keccak256, recoverMessageAddress } from 'viem'; +import { recoverMessageAddress } from 'viem'; import { TxHash } from '../tx/tx_hash.js'; +import { get0xStringHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { Signature } from './signature.js'; import { TopicType, createTopicString } from './topic_type.js'; @@ -55,7 +56,7 @@ export class BlockAttestation extends Gossipable { async getSender() { if (!this.sender) { // Recover the sender from the attestation - const hashed = keccak256(this.getPayload()); + const hashed = get0xStringHashedSignaturePayload(this.archive, this.txHashes); const address = await recoverMessageAddress({ message: { raw: hashed }, signature: this.signature.to0xString(), @@ -68,7 +69,7 @@ export class BlockAttestation extends Gossipable { } getPayload(): Buffer { - return serializeToBuffer([this.archive, this.txHashes]); + return getSignaturePayload(this.archive, this.txHashes); } toBuffer(): Buffer { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index e9d06208a01..8164e755117 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -3,9 +3,10 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { Fr } from 
'@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { keccak256, recoverMessageAddress } from 'viem'; +import { recoverMessageAddress } from 'viem'; import { TxHash } from '../tx/tx_hash.js'; +import { get0xStringHashedSignaturePayload, getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { Signature } from './signature.js'; import { TopicType, createTopicString } from './topic_type.js'; @@ -49,13 +50,25 @@ export class BlockProposal extends Gossipable { return BlockProposalHash.fromField(this.archive); } + static async createProposalFromSigner( + header: Header, + archive: Fr, + txs: TxHash[], + payloadSigner: (payload: Buffer) => Promise, + ) { + const hashed = getHashedSignaturePayload(archive, txs); + const sig = await payloadSigner(hashed); + + return new BlockProposal(header, archive, txs, sig); + } + /**Get Sender * Lazily evaluate the sender of the proposal; result is cached */ async getSender() { if (!this.sender) { // performance note(): this signature method requires another hash behind the scenes - const hashed = keccak256(this.getPayload()); + const hashed = get0xStringHashedSignaturePayload(this.archive, this.txs); const address = await recoverMessageAddress({ message: { raw: hashed }, signature: this.signature.to0xString(), @@ -68,7 +81,7 @@ export class BlockProposal extends Gossipable { } getPayload() { - return serializeToBuffer([this.archive, this.txs]); + return getSignaturePayload(this.archive, this.txs); } toBuffer(): Buffer { diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts new file mode 100644 index 00000000000..6eca0070a18 --- /dev/null +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -0,0 +1,32 @@ +import { Buffer32 } from '@aztec/foundation/buffer'; +import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; +import { type Fr } from '@aztec/foundation/fields'; +import { serializeToBuffer } from '@aztec/foundation/serialize'; + +import { keccak256 as keccak2560xString } from 'viem'; + +import { type TxHash } from '../tx/tx_hash.js'; + +/** + * Get the payload for the signature of the block proposal + * @param archive - The archive of the block + * @param txs - The transactions in the block + * @returns The payload for the signature of the block proposal + */ +export function getSignaturePayload(archive: Fr, txs: TxHash[]) { + return serializeToBuffer([archive, Buffer32.fromNumber(txs.length), txs]); +} + +/** + * Get the hashed payload for the signature of the block proposal + * @param archive - The archive of the block + * @param txs - The transactions in the block + * @returns The hashed payload for the signature of the block proposal + */ +export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer { + return keccak256Buffer(getSignaturePayload(archive, txs)); +} + +export function get0xStringHashedSignaturePayload(archive: Fr, txs: TxHash[]): `0x${string}` { + return keccak2560xString(getSignaturePayload(archive, txs)); +} diff --git a/yarn-project/circuit-types/src/p2p/index.ts b/yarn-project/circuit-types/src/p2p/index.ts index a8a7f011fd0..e6c268523c1 100644 --- a/yarn-project/circuit-types/src/p2p/index.ts +++ b/yarn-project/circuit-types/src/p2p/index.ts @@ -4,3 +4,4 @@ export * from './interface.js'; export * from './gossipable.js'; export * from './topic_type.js'; export * from './signature.js'; +export * from 
'./block_utils.js'; diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 0c57accf546..2e4b9495d66 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -1,14 +1,13 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type PrivateKeyAccount } from 'viem'; -import { keccak256 } from 'viem'; import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; +import { get0xStringHashedSignaturePayload } from './block_utils.js'; import { Signature } from './signature.js'; export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise => { @@ -17,7 +16,7 @@ export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise TxHash.random()); - const hash = keccak256(serializeToBuffer([archive, txs])); + const hash = get0xStringHashedSignaturePayload(archive, txs); const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); return new BlockProposal(blockHeader, archive, txs, signature); @@ -30,7 +29,7 @@ export const makeBlockAttestation = async (signer?: PrivateKeyAccount): Promise< const blockHeader = makeHeader(1); const archive = Fr.random(); const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - const hash = keccak256(serializeToBuffer([archive, txs])); + const hash = get0xStringHashedSignaturePayload(archive, txs); const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); return new BlockAttestation(blockHeader, archive, txs, signature); diff --git a/yarn-project/foundation/src/buffer/buffer32.ts b/yarn-project/foundation/src/buffer/buffer32.ts index 399cdb009e3..736c0ada457 100644 --- a/yarn-project/foundation/src/buffer/buffer32.ts +++ b/yarn-project/foundation/src/buffer/buffer32.ts @@ -116,6 +116,15 @@ export class Buffer32 { return new Buffer32(Buffer.from(str, 'hex')); } + /** + * Converts a number into a Buffer32 object. + * @param num - The number to convert. + * @returns A new Buffer32 object. + */ + public static fromNumber(num: number): Buffer32 { + return new Buffer32(serializeBigInt(BigInt(num), Buffer32.SIZE)); + } + /** * Generates a random Buffer32. * @returns A new Buffer32 object. 
diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 4c1937a6b79..8930d86b890 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,8 +1,8 @@ import { type L2Block, type Signature, type TxHash } from '@aztec/circuit-types'; +import { getHashedSignaturePayload } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; -import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { serializeToBuffer } from '@aztec/foundation/serialize'; @@ -255,7 +255,7 @@ export class L1Publisher { blockHash: block.hash().toString(), }; - const digest = keccak256(serializeToBuffer(block.archive.root, txHashes ?? [])); + const digest = getHashedSignaturePayload(block.archive.root, txHashes ?? []); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 1115d72208f..79e741a4ea6 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -2,7 +2,6 @@ import { BlockAttestation, BlockProposal, type TxHash } from '@aztec/circuit-typ import { type Header } from '@aztec/circuits.js'; import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type ValidatorKeyStore } from '../key_store/interface.js'; @@ -19,12 +18,9 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) 
*/ async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { - // NOTE: just signing the archive and txs for now - const payload = serializeToBuffer([archive, txs]); - const hashed = keccak256(payload); - const sig = await this.keyStore.sign(hashed); + const payloadSigner = (payload: Buffer) => this.keyStore.sign(payload); - return new BlockProposal(header, archive, txs, sig); + return BlockProposal.createProposalFromSigner(header, archive, txs, payloadSigner); } /** @@ -40,6 +36,7 @@ export class ValidationService { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct const buf = keccak256(proposal.getPayload()); + console.log("attestation buf", buf); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } From e6e7f6b2072a8b6d382c41d0a73e2e76c865649d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 10:17:37 +0000 Subject: [PATCH 029/123] fix --- yarn-project/circuit-types/src/p2p/block_utils.ts | 10 ++++++---- yarn-project/p2p/src/client/p2p_client.ts | 6 ++++-- yarn-project/p2p/src/service/service.ts | 2 +- .../src/duties/validation_service.ts | 3 +-- yarn-project/validator-client/src/validator.ts | 13 ++++++++++--- 5 files changed, 22 insertions(+), 12 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index 6eca0070a18..cd1c5a48842 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -1,9 +1,7 @@ -import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; -import { keccak256 as keccak2560xString } from 'viem'; +import { encodeAbiParameters, keccak256 as keccak2560xString, parseAbiParameters } from 'viem'; import { type TxHash } from '../tx/tx_hash.js'; @@ -14,7 +12,11 @@ import { type TxHash } from '../tx/tx_hash.js'; * @returns The payload for the signature of the block proposal */ export function getSignaturePayload(archive: Fr, txs: TxHash[]) { - return serializeToBuffer([archive, Buffer32.fromNumber(txs.length), txs]); + const abi = parseAbiParameters('bytes32, bytes32[]'); + const txArray = txs.map(tx => tx.to0xString()); + const encodedData = encodeAbiParameters(abi, [archive.toString(), txArray] as const); + + return Buffer.from(encodedData.slice(2), 'hex'); } /** diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 7534c320164..9fec7856fae 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -70,7 +70,7 @@ export interface P2P { */ // REVIEW: https://github.com/AztecProtocol/aztec-packages/issues/7963 // ^ This pattern is not my favorite (md) - registerBlockProposalHandler(handler: (block: BlockProposal) => Promise): void; + registerBlockProposalHandler(handler: (block: BlockProposal) => Promise): void; /** * Request a list of transactions from another peer by their tx hashes. 
@@ -287,7 +287,7 @@ export class P2PClient implements P2P { // REVIEW: https://github.com/AztecProtocol/aztec-packages/issues/7963 // ^ This pattern is not my favorite (md) - public registerBlockProposalHandler(handler: (block: BlockProposal) => Promise): void { + public registerBlockProposalHandler(handler: (block: BlockProposal) => Promise): void { this.p2pService.registerBlockReceivedCallback(handler); } @@ -319,6 +319,8 @@ export class P2PClient implements P2P { this.log.debug(`Requested ${txHash.toString()} from peer | success = ${!!tx}`); if (tx) { + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8485): This check is not sufficient to validate the transaction. We need to validate the entire proof. + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8483): alter peer scoring system for a validator that returns an invalid transcation await this.txPool.addTxs([tx]); } diff --git a/yarn-project/p2p/src/service/service.ts b/yarn-project/p2p/src/service/service.ts index 607927d3454..ce486e0b2bb 100644 --- a/yarn-project/p2p/src/service/service.ts +++ b/yarn-project/p2p/src/service/service.ts @@ -46,7 +46,7 @@ export interface P2PService { ): Promise | undefined>; // Leaky abstraction: fix https://github.com/AztecProtocol/aztec-packages/issues/7963 - registerBlockReceivedCallback(callback: (block: BlockProposal) => Promise): void; + registerBlockReceivedCallback(callback: (block: BlockProposal) => Promise): void; getEnr(): ENR | undefined; } diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 79e741a4ea6..3a4ec7e8c6d 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -17,7 +17,7 @@ export class ValidationService { * * @returns A block proposal signing the above information (not the current implementation!!!) 
*/ - async createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { + createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { const payloadSigner = (payload: Buffer) => this.keyStore.sign(payload); return BlockProposal.createProposalFromSigner(header, archive, txs, payloadSigner); @@ -36,7 +36,6 @@ export class ValidationService { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct const buf = keccak256(proposal.getPayload()); - console.log("attestation buf", buf); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index a4ccd25f6ba..1c8442ef367 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -63,15 +63,22 @@ export class ValidatorClient implements Validator { } public registerBlockProposalHandler() { - const handler = (block: BlockProposal): Promise => { + const handler = (block: BlockProposal): Promise => { return this.attestToProposal(block); }; this.p2pClient.registerBlockProposalHandler(handler); } - async attestToProposal(proposal: BlockProposal): Promise { + async attestToProposal(proposal: BlockProposal): Promise { // Check that all of the tranasctions in the proposal are available in the tx pool before attesting - await this.ensureTransactionsAreAvailable(proposal); + try { + await this.ensureTransactionsAreAvailable(proposal); + } catch (error: any) { + if (error instanceof TransactionsNotAvailableError) { + this.log.error(`Transactions not available, skipping attestation ${error.message}`); + } + return undefined; + } this.log.debug(`Transactions available, attesting to proposal with ${proposal.txs.length} transactions`); // If the above function does not throw an error, then we can attest to the proposal From cde6283c83db0efbf3b9d2b37b6adf8f377b3105 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 11:09:53 +0000 Subject: [PATCH 030/123] fix: merge fix --- .../src/composed/integration_l1_publisher.test.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index bc59c817ebc..ea3b881330f 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -427,12 +427,13 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, - functionName: 'propose', + functionName: 'proposeWithBody', args: [ `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.header.hash().toBuffer().toString('hex')}`, [], + [], `0x${block.body.toBuffer().toString('hex')}`, ], }); @@ -530,12 +531,13 @@ describe('L1Publisher integration', () => { i == 0 ? 
encodeFunctionData({ abi: RollupAbi, - functionName: 'propose', + functionName: 'proposeWithBody', args: [ `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.header.hash().toBuffer().toString('hex')}`, [], + [], `0x${block.body.toBuffer().toString('hex')}`, ], }) @@ -547,6 +549,7 @@ describe('L1Publisher integration', () => { `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.header.hash().toBuffer().toString('hex')}`, [], + [] ], }); expect(ethTx.input).toEqual(expectedData); From 8290c99a5d01119ca819941067d3e6a6d046ea77 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 11:10:50 +0000 Subject: [PATCH 031/123] fmt --- .../end-to-end/src/composed/integration_l1_publisher.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index ea3b881330f..89041117a11 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -549,7 +549,7 @@ describe('L1Publisher integration', () => { `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.header.hash().toBuffer().toString('hex')}`, [], - [] + [], ], }); expect(ethTx.input).toEqual(expectedData); From b1892700da6eb85562a774cefa883676ad931897 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 15:16:00 +0000 Subject: [PATCH 032/123] feat: initial rate limiter impl --- .../p2p/src/service/reqresp/interface.ts | 41 ++++++ .../src/service/reqresp/rate_limiter/index.ts | 1 + .../reqresp/rate_limiter/rate_limiter.ts | 137 ++++++++++++++++++ .../reqresp/rate_limiter/rate_limits.ts | 35 +++++ .../p2p/src/service/reqresp/reqresp.ts | 15 +- 5 files changed, 228 insertions(+), 1 deletion(-) create mode 100644 yarn-project/p2p/src/service/reqresp/rate_limiter/index.ts create mode 100644 yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts create mode 100644 yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 5ae61d0389d..c95c1785606 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -16,6 +16,47 @@ export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | */ export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise; +/** + * A type mapping from supprotocol to it's description + */ +export type ReqRespSubProtocols = Record; + +export type ReqRespSubProtocolRateLimits = Record; + +/** + * Each Request Response Sub Protocol define's it's own handler and rate limits + */ +export type ReqRespSubProtocolDescription = { + handler: ReqRespSubProtocolHandler; + rateLimits: ProtocolRateLimitQuota +} + +/** + * A rate limit quota + */ +export interface RateLimitQuota { + /** + * The time window in ms + */ + quotaTimeMs: number; + /** + * The number of requests allowed within the time window + */ + quotaCount: number; +} + +export interface ProtocolRateLimitQuota { + /** + * The rate limit quota for a single peer + */ + peerLimit: RateLimitQuota; + /** + * The rate limit quota for the global peer set + */ + globalLimit: RateLimitQuota; +} + + /** * A type mapping from 
supprotocol to it's handling funciton */ diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/index.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/index.ts new file mode 100644 index 00000000000..86141061e71 --- /dev/null +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/index.ts @@ -0,0 +1 @@ +export { RequestResponseRateLimiter } from "./rate_limiter.js"; \ No newline at end of file diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts new file mode 100644 index 00000000000..70a6b9c714c --- /dev/null +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts @@ -0,0 +1,137 @@ +/** + * @attribution Rate limiter approach implemented in the lodestar ethereum 2 client. + * Rationale is that if it was good enough for them, then it should be good enough for us. + * https://github.com/ChainSafe/lodestar + */ + +import { ReqRespSubProtocol, ReqRespSubProtocolRateLimits, TX_REQ_PROTOCOL } from "../interface.js"; +import { PeerId } from "@libp2p/interface"; +import { DEFAULT_RATE_LIMITS } from "./rate_limits.js"; + +// Check for disconnected peers every 10 minutes +// at which time we will clean up their rate limits +// TODO(md): setup a job that will run this cleanup functionality periodically +const CHECK_DISCONNECTED_PEERS_INTERVAL_MS = 10 * 60 * 1000; + +// TODO: make this a nice interface like in the requests that takes the SubProtocol and defines the rate limits +// can probably borrow the types from there + + +class GCRARateLimiter { + private vst: number; + private readonly emissionInterval: number; + private readonly limitInterval: number; + + constructor(quotaCount: number, quotaTimeMs: number) { + this.emissionInterval = quotaTimeMs / quotaCount; + this.limitInterval = quotaTimeMs; + this.vst = Date.now(); + } + + allow(): boolean { + const now = Date.now(); + const newVst = Math.max(now, this.vst + this.emissionInterval); + + if (newVst - now > this.limitInterval) { + this.vst = newVst; + return true; + } + + return false; + } +} + +interface PeerRateLimiter { + limiter: GCRARateLimiter; + lastAccess: number; +} + +class SubProtocolRateLimiter { + private peerLimiters: Map = new Map(); + private globalLimiter: GCRARateLimiter; + private readonly peerRate: number; + private readonly peerBurst: number; + + // TODO:DOC + private readonly cleanupInterval: NodeJS.Timeout; + + constructor(peerRate: number, peerBurst: number, globalRate: number, globalBurst: number) { + this.peerLimiters = new Map(); + this.globalLimiter = new GCRARateLimiter(globalRate, globalBurst); + this.peerRate = peerRate; + this.peerBurst = peerBurst; + + this.cleanupInterval = setInterval(() => { + this.cleanupInactivePeers(); + }, CHECK_DISCONNECTED_PEERS_INTERVAL_MS); + } + + allow(peerId: PeerId): boolean { + if (!this.globalLimiter.allow()) { + return false; + } + + const peerIdStr = peerId.toString(); + let peerLimiter: PeerRateLimiter | undefined = this.peerLimiters.get(peerIdStr); + if (!peerLimiter) { + // Create a limiter for this peer + peerLimiter = { + limiter: new GCRARateLimiter(this.peerRate, this.peerBurst), + lastAccess: Date.now(), + }; + this.peerLimiters.set(peerIdStr, peerLimiter); + } else { + peerLimiter.lastAccess = Date.now(); + } + return peerLimiter.limiter.allow(); + } + + private cleanupInactivePeers() { + const now = Date.now(); + this.peerLimiters.forEach((peerLimiter, peerId) => { + if (now - peerLimiter.lastAccess > CHECK_DISCONNECTED_PEERS_INTERVAL_MS) { + 
this.peerLimiters.delete(peerId); + } + }); + } + + /** + * Make sure to call destroy on each of the sub protocol rate limiters when cleaning up + */ + destroy() { + clearInterval(this.cleanupInterval); + } +} + +export class RequestResponseRateLimiter { + private subProtocolRateLimiters: Map; + + constructor(rateLimits: ReqRespSubProtocolRateLimits = DEFAULT_RATE_LIMITS) { + this.subProtocolRateLimiters = new Map(); + + for (const [subProtocol, protocolLimits] of Object.entries(rateLimits)) { + this.subProtocolRateLimiters.set( + subProtocol as ReqRespSubProtocol, + new SubProtocolRateLimiter( + protocolLimits.peerLimit.quotaCount, + protocolLimits.peerLimit.quotaTimeMs, + protocolLimits.globalLimit.quotaCount, + protocolLimits.globalLimit.quotaTimeMs + )); + } + } + + allow(subProtocol: ReqRespSubProtocol, peerId: PeerId): boolean { + const limiter = this.subProtocolRateLimiters.get(subProtocol); + if (!limiter) { + // TODO: maybe throw an error here if no rate limiter is configured? + return true; + } + return limiter.allow(peerId); + } + + destroy() { + this.subProtocolRateLimiters.forEach((limiter) => limiter.destroy()); + } +} + diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts new file mode 100644 index 00000000000..c8807812138 --- /dev/null +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts @@ -0,0 +1,35 @@ +import { PING_PROTOCOL, ReqRespSubProtocolRateLimits, STATUS_PROTOCOL, TX_REQ_PROTOCOL } from "../interface.js"; + +// TODO(md): these defaults need to be tuned +export const DEFAULT_RATE_LIMITS: ReqRespSubProtocolRateLimits = { + [PING_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, + }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, + }, + }, + [STATUS_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, + }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, + }, + }, + [TX_REQ_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, + }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, + }, + } +} \ No newline at end of file diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 8c6e19e5d40..2ee27e6f641 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -8,6 +8,7 @@ import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; +import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; import { type P2PReqRespConfig } from './config.js'; import { DEFAULT_SUB_PROTOCOL_HANDLERS, @@ -35,12 +36,15 @@ export class ReqResp { private individualRequestTimeoutMs: number; private subProtocolHandlers: ReqRespSubProtocolHandlers = DEFAULT_SUB_PROTOCOL_HANDLERS; + private rateLimiter: RequestResponseRateLimiter; constructor(config: P2PReqRespConfig, protected readonly libp2p: Libp2p) { this.logger = createDebugLogger('aztec:p2p:reqresp'); this.overallRequestTimeoutMs = config.overallRequestTimeoutMs; this.individualRequestTimeoutMs = config.individualRequestTimeoutMs; + + this.rateLimiter = new RequestResponseRateLimiter(); } /** @@ -62,6 +66,7 @@ export class ReqResp { for (const protocol of Object.keys(this.subProtocolHandlers)) { await this.libp2p.unhandle(protocol); } + this.rateLimiter.destroy(); await this.libp2p.stop(); 
this.abortController.abort(); } @@ -167,8 +172,16 @@ export class ReqResp { * * @param param0 - The incoming stream data */ - private async streamHandler(protocol: ReqRespSubProtocol, { stream }: IncomingStreamData) { + private async streamHandler(protocol: ReqRespSubProtocol, { stream, connection }: IncomingStreamData) { // Store a reference to from this for the async generator + if (!this.rateLimiter.allow(protocol, connection.remotePeer)) { + this.logger.warn(`Rate limit exceeded for ${protocol} from ${connection.remotePeer}`); + + // TODO: handle changing peer scoring for failed rate limit, maybe differentiate between global and peer limits here when punishing + await stream.close(); + return; + } + const handler = this.subProtocolHandlers[protocol]; try { From 97a6a14f2476d0172f979f88f596428c46761dd2 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 20:07:54 +0000 Subject: [PATCH 033/123] feat: tests + documentation --- .../p2p/src/service/reqresp/interface.ts | 5 +- .../src/service/reqresp/rate_limiter/index.ts | 2 +- .../reqresp/rate_limiter/rate_limiter.test.ts | 142 +++++++++ .../reqresp/rate_limiter/rate_limiter.ts | 280 +++++++++++------- .../reqresp/rate_limiter/rate_limits.ts | 60 ++-- .../p2p/src/service/reqresp/reqresp.ts | 2 +- 6 files changed, 348 insertions(+), 143 deletions(-) create mode 100644 yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.test.ts diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index c95c1785606..41cce2ac954 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -28,8 +28,8 @@ export type ReqRespSubProtocolRateLimits = Record { + return new MockPeerId(id) as unknown as PeerId; +}; + +describe('rate limiter', () => { + let rateLimiter: RequestResponseRateLimiter; + + beforeEach(() => { + jest.useFakeTimers(); + const config = { + [TX_REQ_PROTOCOL]: { + // One request every 200ms + peerLimit: { + quotaCount: 5, + quotaTimeMs: 1000, + }, + // One request every 100ms + globalLimit: { + quotaCount: 10, + quotaTimeMs: 1000, + }, + }, + } as ReqRespSubProtocolRateLimits; // force type as we will not provide descriptions of all protocols + rateLimiter = new RequestResponseRateLimiter(config); + }); + + afterEach(() => { + jest.useRealTimers(); + rateLimiter.stop(); + }); + + it('Should allow requests within a peer limit', () => { + const peerId = makePeer('peer1'); + for (let i = 0; i < 5; i++) { + jest.advanceTimersByTime(200); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(true); + } + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(false); + jest.advanceTimersByTime(200); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(true); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(false); + }); + + it('Should allow requests within the global limit', () => { + for (let i = 0; i < 10; i++) { + jest.advanceTimersByTime(99); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, makePeer(`peer${i}`))).toBe(true); + } + expect(rateLimiter.allow(TX_REQ_PROTOCOL, makePeer('nolettoinno'))).toBe(false); + }); + + it('Should reset after quota has passed', () => { + const peerId = makePeer('peer1'); + for (let i = 0; i < 5; i++) { + jest.advanceTimersByTime(200); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(true); + } + expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(false); + jest.advanceTimersByTime(1000); + 
expect(rateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(true); + }); + + it('Should handle multiple protocols separately', () => { + const config = { + [TX_REQ_PROTOCOL]: { + peerLimit: { + quotaCount: 5, + quotaTimeMs: 1000, + }, + globalLimit: { + quotaCount: 10, + quotaTimeMs: 1000, + }, + }, + [PING_PROTOCOL]: { + peerLimit: { + quotaCount: 2, + quotaTimeMs: 1000, + }, + globalLimit: { + quotaCount: 4, + quotaTimeMs: 1000, + }, + }, + } as ReqRespSubProtocolRateLimits; + const multiProtocolRateLimiter = new RequestResponseRateLimiter(config); + + const peerId = makePeer('peer1'); + + // Protocol 1 + for (let i = 0; i < 5; i++) { + jest.advanceTimersByTime(200); + expect(multiProtocolRateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(true); + } + expect(multiProtocolRateLimiter.allow(TX_REQ_PROTOCOL, peerId)).toBe(false); + + // Protocol 2 + for (let i = 0; i < 2; i++) { + jest.advanceTimersByTime(500); + expect(multiProtocolRateLimiter.allow(PING_PROTOCOL, peerId)).toBe(true); + } + expect(multiProtocolRateLimiter.allow(PING_PROTOCOL, peerId)).toBe(false); + + multiProtocolRateLimiter.stop(); + }); + + it('Should allow requests if no rate limiter is configured', () => { + const rateLimiter = new RequestResponseRateLimiter({} as ReqRespSubProtocolRateLimits); + expect(rateLimiter.allow(TX_REQ_PROTOCOL, makePeer('peer1'))).toBe(true); + }); + + it('Should smooth out spam', () => { + const requests = 1000; + const peers = 100; + let allowedRequests = 0; + + for (let i = 0; i < requests; i++) { + const peerId = makePeer(`peer${i % peers}`); + if (rateLimiter.allow(TX_REQ_PROTOCOL, peerId)) { + allowedRequests++; + } + jest.advanceTimersByTime(5); + } + expect(allowedRequests).toBe(50); + }); +}); diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts index 70a6b9c714c..f92ad0eb81e 100644 --- a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts @@ -3,135 +3,199 @@ * Rationale is that if it was good enough for them, then it should be good enough for us. * https://github.com/ChainSafe/lodestar */ +import { type PeerId } from '@libp2p/interface'; -import { ReqRespSubProtocol, ReqRespSubProtocolRateLimits, TX_REQ_PROTOCOL } from "../interface.js"; -import { PeerId } from "@libp2p/interface"; -import { DEFAULT_RATE_LIMITS } from "./rate_limits.js"; +import { type ReqRespSubProtocol, type ReqRespSubProtocolRateLimits } from '../interface.js'; +import { DEFAULT_RATE_LIMITS } from './rate_limits.js'; // Check for disconnected peers every 10 minutes -// at which time we will clean up their rate limits -// TODO(md): setup a job that will run this cleanup functionality periodically const CHECK_DISCONNECTED_PEERS_INTERVAL_MS = 10 * 60 * 1000; -// TODO: make this a nice interface like in the requests that takes the SubProtocol and defines the rate limits -// can probably borrow the types from there - - -class GCRARateLimiter { - private vst: number; - private readonly emissionInterval: number; - private readonly limitInterval: number; - - constructor(quotaCount: number, quotaTimeMs: number) { - this.emissionInterval = quotaTimeMs / quotaCount; - this.limitInterval = quotaTimeMs; - this.vst = Date.now(); +/** + * GCRARateLimiter: A Generic Cell Rate Algorithm (GCRA) based rate limiter. + * + * How it works: + * 1. 
The rate limiter allows a certain number of operations (quotaCount) within a specified + * time interval (quotaTimeMs). + * 2. It uses a "virtual scheduling time" (VST) to determine when the next operation should be allowed. + * 3. When an operation is requested, the limiter checks if enough time has passed since the last + * allowed operation. + * 4. If sufficient time has passed, the operation is allowed, and the VST is updated. + * 5. If not enough time has passed, the operation is denied. + * + * The limiter also allows for short bursts of activity, as long as the overall rate doesn't exceed + * the specified quota over time. + * + * Usage example: + * ``` + * const limiter = new GCRARateLimiter(100, 60000); // 100 operations per minute + * ``` + */ +export class GCRARateLimiter { + // Virtual scheduling time: i.e. the time at which we should allow the next request + private vst: number; + // The interval at which we emit a new token + private readonly emissionInterval: number; + // The interval over which we limit the number of requests + private readonly limitInterval: number; + + /** + * @param quotaCount - The number of requests to allow over the limit interval + * @param quotaTimeMs - The time interval over which the quotaCount applies + */ + constructor(quotaCount: number, quotaTimeMs: number) { + this.emissionInterval = quotaTimeMs / quotaCount; + this.limitInterval = quotaTimeMs; + this.vst = Date.now(); + } + + allow(): boolean { + const now = Date.now(); + if (now < this.vst) { + return false; } - allow(): boolean { - const now = Date.now(); - const newVst = Math.max(now, this.vst + this.emissionInterval); - - if (newVst - now > this.limitInterval) { - this.vst = newVst; - return true; - } - - return false; + const newVst = this.vst + this.emissionInterval; + if (newVst - now <= this.limitInterval) { + this.vst = newVst; + return true; } + + return false; + } } interface PeerRateLimiter { - limiter: GCRARateLimiter; - lastAccess: number; + // The rate limiter for this peer + limiter: GCRARateLimiter; + // The last time the peer was accessed - used to determine if the peer is still connected + lastAccess: number; } -class SubProtocolRateLimiter { - private peerLimiters: Map = new Map(); - private globalLimiter: GCRARateLimiter; - private readonly peerRate: number; - private readonly peerBurst: number; - - // TODO:DOC - private readonly cleanupInterval: NodeJS.Timeout; - - constructor(peerRate: number, peerBurst: number, globalRate: number, globalBurst: number) { - this.peerLimiters = new Map(); - this.globalLimiter = new GCRARateLimiter(globalRate, globalBurst); - this.peerRate = peerRate; - this.peerBurst = peerBurst; - - this.cleanupInterval = setInterval(() => { - this.cleanupInactivePeers(); - }, CHECK_DISCONNECTED_PEERS_INTERVAL_MS); - } - - allow(peerId: PeerId): boolean { - if (!this.globalLimiter.allow()) { - return false; - } - - const peerIdStr = peerId.toString(); - let peerLimiter: PeerRateLimiter | undefined = this.peerLimiters.get(peerIdStr); - if (!peerLimiter) { - // Create a limiter for this peer - peerLimiter = { - limiter: new GCRARateLimiter(this.peerRate, this.peerBurst), - lastAccess: Date.now(), - }; - this.peerLimiters.set(peerIdStr, peerLimiter); - } else { - peerLimiter.lastAccess = Date.now(); - } - return peerLimiter.limiter.allow(); - } - - private cleanupInactivePeers() { - const now = Date.now(); - this.peerLimiters.forEach((peerLimiter, peerId) => { - if (now - peerLimiter.lastAccess > CHECK_DISCONNECTED_PEERS_INTERVAL_MS) { - 
this.peerLimiters.delete(peerId); - } - }); +/** + * SubProtocolRateLimiter: A rate limiter for managing request rates on a per-peer and global basis for a specific subprotocol. + * + * This class provides a two-tier rate limiting system: + * 1. A global rate limit for all requests across all peers for this subprotocol. + * 2. Individual rate limits for each peer. + * + * How it works: + * - When a request comes in, it first checks against the global rate limit. + * - If the global limit allows, it then checks against the specific peer's rate limit. + * - The request is only allowed if both the global and peer-specific limits allow it. + * - It automatically creates and manages rate limiters for new peers as they make requests. + * - It periodically cleans up rate limiters for inactive peers to conserve memory. + * + * Note: Remember to call `start()` to begin the cleanup process and `stop()` when shutting down to clear the cleanup interval. + */ +export class SubProtocolRateLimiter { + private peerLimiters: Map = new Map(); + private globalLimiter: GCRARateLimiter; + private readonly peerQuotaCount: number; + private readonly peerQuotaTimeMs: number; + + private cleanupInterval: NodeJS.Timeout | undefined = undefined; + + constructor(peerQuotaCount: number, peerQuotaTimeMs: number, globalQuotaCount: number, globalQuotaTimeMs: number) { + this.peerLimiters = new Map(); + this.globalLimiter = new GCRARateLimiter(globalQuotaCount, globalQuotaTimeMs); + this.peerQuotaCount = peerQuotaCount; + this.peerQuotaTimeMs = peerQuotaTimeMs; + } + + start() { + this.cleanupInterval = setInterval(() => { + this.cleanupInactivePeers(); + }, CHECK_DISCONNECTED_PEERS_INTERVAL_MS); + } + + allow(peerId: PeerId): boolean { + if (!this.globalLimiter.allow()) { + return false; } - /** - * Make sure to call destroy on each of the sub protocol rate limiters when cleaning up - */ - destroy() { - clearInterval(this.cleanupInterval); + const peerIdStr = peerId.toString(); + let peerLimiter: PeerRateLimiter | undefined = this.peerLimiters.get(peerIdStr); + if (!peerLimiter) { + // Create a limiter for this peer + peerLimiter = { + limiter: new GCRARateLimiter(this.peerQuotaCount, this.peerQuotaTimeMs), + lastAccess: Date.now(), + }; + this.peerLimiters.set(peerIdStr, peerLimiter); + } else { + peerLimiter.lastAccess = Date.now(); } + return peerLimiter.limiter.allow(); + } + + private cleanupInactivePeers() { + const now = Date.now(); + this.peerLimiters.forEach((peerLimiter, peerId) => { + if (now - peerLimiter.lastAccess > CHECK_DISCONNECTED_PEERS_INTERVAL_MS) { + this.peerLimiters.delete(peerId); + } + }); + } + + /** + * Make sure to call destroy on each of the sub protocol rate limiters when cleaning up + */ + stop() { + clearInterval(this.cleanupInterval); + } } +/** + * RequestResponseRateLimiter. + * + * A rate limiter that is protocol aware, then peer aware. + * SubProtocols can have their own global / peer level rate limits. + * + * How it works: + * - Initializes with a set of rate limit configurations for different subprotocols. + * - Creates a separate SubProtocolRateLimiter for each configured subprotocol. + * - When a request comes in, it routes the rate limiting decision to the appropriate subprotocol limiter. 
+ * + * Usage: + * ``` + * const rateLimits = { + * subprotocol1: { peerLimit: { quotaCount: 10, quotaTimeMs: 1000 }, globalLimit: { quotaCount: 100, quotaTimeMs: 1000 } }, + * subprotocol2: { peerLimit: { quotaCount: 5, quotaTimeMs: 1000 }, globalLimit: { quotaCount: 50, quotaTimeMs: 1000 } } + * }; + * const limiter = new RequestResponseRateLimiter(rateLimits); + * + * Note: Ensure to call `stop()` when shutting down to properly clean up all subprotocol limiters. + */ export class RequestResponseRateLimiter { - private subProtocolRateLimiters: Map; - - constructor(rateLimits: ReqRespSubProtocolRateLimits = DEFAULT_RATE_LIMITS) { - this.subProtocolRateLimiters = new Map(); - - for (const [subProtocol, protocolLimits] of Object.entries(rateLimits)) { - this.subProtocolRateLimiters.set( - subProtocol as ReqRespSubProtocol, - new SubProtocolRateLimiter( - protocolLimits.peerLimit.quotaCount, - protocolLimits.peerLimit.quotaTimeMs, - protocolLimits.globalLimit.quotaCount, - protocolLimits.globalLimit.quotaTimeMs - )); - } + private subProtocolRateLimiters: Map; + + constructor(rateLimits: ReqRespSubProtocolRateLimits = DEFAULT_RATE_LIMITS) { + this.subProtocolRateLimiters = new Map(); + + for (const [subProtocol, protocolLimits] of Object.entries(rateLimits)) { + this.subProtocolRateLimiters.set( + subProtocol as ReqRespSubProtocol, + new SubProtocolRateLimiter( + protocolLimits.peerLimit.quotaCount, + protocolLimits.peerLimit.quotaTimeMs, + protocolLimits.globalLimit.quotaCount, + protocolLimits.globalLimit.quotaTimeMs, + ), + ); } + } - allow(subProtocol: ReqRespSubProtocol, peerId: PeerId): boolean { - const limiter = this.subProtocolRateLimiters.get(subProtocol); - if (!limiter) { - // TODO: maybe throw an error here if no rate limiter is configured? - return true; - } - return limiter.allow(peerId); + allow(subProtocol: ReqRespSubProtocol, peerId: PeerId): boolean { + const limiter = this.subProtocolRateLimiters.get(subProtocol); + if (!limiter) { + // TODO: maybe throw an error here if no rate limiter is configured? 
+ return true; } + return limiter.allow(peerId); + } - destroy() { - this.subProtocolRateLimiters.forEach((limiter) => limiter.destroy()); - } + stop() { + this.subProtocolRateLimiters.forEach(limiter => limiter.stop()); + } } - diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts index c8807812138..836505ad386 100644 --- a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limits.ts @@ -1,35 +1,35 @@ -import { PING_PROTOCOL, ReqRespSubProtocolRateLimits, STATUS_PROTOCOL, TX_REQ_PROTOCOL } from "../interface.js"; +import { PING_PROTOCOL, type ReqRespSubProtocolRateLimits, STATUS_PROTOCOL, TX_REQ_PROTOCOL } from '../interface.js'; // TODO(md): these defaults need to be tuned export const DEFAULT_RATE_LIMITS: ReqRespSubProtocolRateLimits = { - [PING_PROTOCOL]: { - peerLimit: { - quotaTimeMs: 1000, - quotaCount: 5, - }, - globalLimit: { - quotaTimeMs: 1000, - quotaCount: 10, - }, + [PING_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, }, - [STATUS_PROTOCOL]: { - peerLimit: { - quotaTimeMs: 1000, - quotaCount: 5, - }, - globalLimit: { - quotaTimeMs: 1000, - quotaCount: 10, - }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, }, - [TX_REQ_PROTOCOL]: { - peerLimit: { - quotaTimeMs: 1000, - quotaCount: 5, - }, - globalLimit: { - quotaTimeMs: 1000, - quotaCount: 10, - }, - } -} \ No newline at end of file + }, + [STATUS_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, + }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, + }, + }, + [TX_REQ_PROTOCOL]: { + peerLimit: { + quotaTimeMs: 1000, + quotaCount: 5, + }, + globalLimit: { + quotaTimeMs: 1000, + quotaCount: 10, + }, + }, +}; diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 2ee27e6f641..dfade608a73 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -8,13 +8,13 @@ import { type Libp2p } from 'libp2p'; import { type Uint8ArrayList } from 'uint8arraylist'; import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; -import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; import { type P2PReqRespConfig } from './config.js'; import { DEFAULT_SUB_PROTOCOL_HANDLERS, type ReqRespSubProtocol, type ReqRespSubProtocolHandlers, } from './interface.js'; +import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; /** * The Request Response Service From 0e22558b2b744526658f199f7c100e38353492a1 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 20:26:24 +0000 Subject: [PATCH 034/123] test: add to reqresp.test.ts --- .../reqresp/rate_limiter/rate_limiter.ts | 34 +++++++++---------- .../p2p/src/service/reqresp/reqresp.test.ts | 24 +++++++++++++ .../p2p/src/service/reqresp/reqresp.ts | 3 +- 3 files changed, 43 insertions(+), 18 deletions(-) diff --git a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts index f92ad0eb81e..7efea5056d4 100644 --- a/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts +++ b/yarn-project/p2p/src/service/reqresp/rate_limiter/rate_limiter.ts @@ -94,8 +94,6 @@ export class SubProtocolRateLimiter { private readonly peerQuotaCount: number; private readonly 
peerQuotaTimeMs: number; - private cleanupInterval: NodeJS.Timeout | undefined = undefined; - constructor(peerQuotaCount: number, peerQuotaTimeMs: number, globalQuotaCount: number, globalQuotaTimeMs: number) { this.peerLimiters = new Map(); this.globalLimiter = new GCRARateLimiter(globalQuotaCount, globalQuotaTimeMs); @@ -103,12 +101,6 @@ export class SubProtocolRateLimiter { this.peerQuotaTimeMs = peerQuotaTimeMs; } - start() { - this.cleanupInterval = setInterval(() => { - this.cleanupInactivePeers(); - }, CHECK_DISCONNECTED_PEERS_INTERVAL_MS); - } - allow(peerId: PeerId): boolean { if (!this.globalLimiter.allow()) { return false; @@ -129,7 +121,7 @@ export class SubProtocolRateLimiter { return peerLimiter.limiter.allow(); } - private cleanupInactivePeers() { + cleanupInactivePeers() { const now = Date.now(); this.peerLimiters.forEach((peerLimiter, peerId) => { if (now - peerLimiter.lastAccess > CHECK_DISCONNECTED_PEERS_INTERVAL_MS) { @@ -137,13 +129,6 @@ export class SubProtocolRateLimiter { } }); } - - /** - * Make sure to call destroy on each of the sub protocol rate limiters when cleaning up - */ - stop() { - clearInterval(this.cleanupInterval); - } } /** @@ -170,6 +155,8 @@ export class SubProtocolRateLimiter { export class RequestResponseRateLimiter { private subProtocolRateLimiters: Map; + private cleanupInterval: NodeJS.Timeout | undefined = undefined; + constructor(rateLimits: ReqRespSubProtocolRateLimits = DEFAULT_RATE_LIMITS) { this.subProtocolRateLimiters = new Map(); @@ -186,6 +173,12 @@ export class RequestResponseRateLimiter { } } + start() { + this.cleanupInterval = setInterval(() => { + this.cleanupInactivePeers(); + }, CHECK_DISCONNECTED_PEERS_INTERVAL_MS); + } + allow(subProtocol: ReqRespSubProtocol, peerId: PeerId): boolean { const limiter = this.subProtocolRateLimiters.get(subProtocol); if (!limiter) { @@ -195,7 +188,14 @@ export class RequestResponseRateLimiter { return limiter.allow(peerId); } + cleanupInactivePeers() { + this.subProtocolRateLimiters.forEach(limiter => limiter.cleanupInactivePeers()); + } + + /** + * Make sure to call destroy on each of the sub protocol rate limiters when cleaning up + */ stop() { - this.subProtocolRateLimiters.forEach(limiter => limiter.stop()); + clearInterval(this.cleanupInterval); } } diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index ecd9a6824e9..7c57d3fa845 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -72,6 +72,30 @@ describe('ReqResp', () => { await stopNodes(nodes); }); + it('Should hit a rate limit if too many requests are made in quick succession', async () => { + const nodes = await createNodes(2); + + await startNodes(nodes); + + // Spy on the logger to make sure the error message is logged + const loggerSpy = jest.spyOn((nodes[1].req as any).logger, 'warn'); + + await sleep(500); + await connectToPeers(nodes); + await sleep(500); + + // Default rate is set at 1 every 200 ms; so this should fire a few times + for (let i = 0; i < 10; i++) { + await nodes[0].req.sendRequestToPeer(nodes[1].p2p.peerId, PING_PROTOCOL, Buffer.from('ping')); + } + + // Make sure the error message is logged + const errorMessage = `Rate limit exceeded for ${PING_PROTOCOL} from ${nodes[0].p2p.peerId.toString()}`; + expect(loggerSpy).toHaveBeenCalledWith(errorMessage); + + await stopNodes(nodes); + }); + describe('TX REQ PROTOCOL', () => { it('Can request a Tx from TxHash', async () => { 
const tx = mockTx(); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index dfade608a73..69275a8023f 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -56,6 +56,7 @@ export class ReqResp { for (const subProtocol of Object.keys(this.subProtocolHandlers)) { await this.libp2p.handle(subProtocol, this.streamHandler.bind(this, subProtocol as ReqRespSubProtocol)); } + this.rateLimiter.start(); } /** @@ -66,7 +67,7 @@ export class ReqResp { for (const protocol of Object.keys(this.subProtocolHandlers)) { await this.libp2p.unhandle(protocol); } - this.rateLimiter.destroy(); + this.rateLimiter.stop(); await this.libp2p.stop(); this.abortController.abort(); } From d0da214af28fdbc768b4f95aa7425d65ee65ab20 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 11 Sep 2024 20:33:23 +0000 Subject: [PATCH 035/123] fix: add todo pr number --- yarn-project/p2p/src/service/reqresp/interface.ts | 12 +----------- yarn-project/p2p/src/service/reqresp/reqresp.ts | 2 +- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 41cce2ac954..606efc17bc9 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -17,20 +17,10 @@ export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise; /** - * A type mapping from supprotocol to it's description + * A type mapping from supprotocol to it's rate limits */ -export type ReqRespSubProtocols = Record; - export type ReqRespSubProtocolRateLimits = Record; -/** - * Each Request Response Sub Protocol define's it's own handler and rate limits - */ -export type ReqRespSubProtocolDescription = { - handler: ReqRespSubProtocolHandler; - rateLimits: ProtocolRateLimitQuota; -}; - /** * A rate limit quota */ diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 69275a8023f..51f7a12eb8a 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -178,7 +178,7 @@ export class ReqResp { if (!this.rateLimiter.allow(protocol, connection.remotePeer)) { this.logger.warn(`Rate limit exceeded for ${protocol} from ${connection.remotePeer}`); - // TODO: handle changing peer scoring for failed rate limit, maybe differentiate between global and peer limits here when punishing + // TODO(#8483): handle changing peer scoring for failed rate limit, maybe differentiate between global and peer limits here when punishing await stream.close(); return; } From 2b462681df9017b95b7440b1e2db4163915cbd80 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:38:08 +0000 Subject: [PATCH 036/123] feat: introduce new light signer --- .../archiver/src/archiver/eth_log_handlers.ts | 3 +- .../src/p2p/block_attestation.ts | 2 +- .../circuit-types/src/p2p/block_proposal.ts | 2 +- yarn-project/circuit-types/src/p2p/index.ts | 1 - yarn-project/circuit-types/src/p2p/mocks.ts | 2 +- .../circuit-types/src/p2p/signature.test.ts | 31 ---------- yarn-project/foundation/package.json | 4 +- yarn-project/foundation/src/crypto/index.ts | 1 + .../src/crypto/secp256k1-signer/index.ts | 2 + 
.../secp256k1-signer/secp256k1_signer.test.ts | 58 +++++++++++++++++++ .../secp256k1-signer/secp256k1_signer.ts | 13 +++++ .../src/crypto/secp256k1-signer/utils.ts | 52 +++++++++++++++++ .../src/eth-signature/eth_siganture.test.ts | 35 +++++++++++ .../src/eth-signature/eth_signature.ts} | 37 +++++++----- .../foundation/src/eth-signature/index.ts | 1 + yarn-project/foundation/src/index.ts | 1 + .../p2p/src/attestation_pool/mocks.ts | 3 +- .../p2p/src/service/reqresp/interface.ts | 4 ++ .../src/key_store/interface.ts | 2 +- .../src/key_store/local_key_store.ts | 2 +- yarn-project/yarn.lock | 1 + 21 files changed, 203 insertions(+), 54 deletions(-) delete mode 100644 yarn-project/circuit-types/src/p2p/signature.test.ts create mode 100644 yarn-project/foundation/src/crypto/secp256k1-signer/index.ts create mode 100644 yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts create mode 100644 yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts create mode 100644 yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts create mode 100644 yarn-project/foundation/src/eth-signature/eth_siganture.test.ts rename yarn-project/{circuit-types/src/p2p/signature.ts => foundation/src/eth-signature/eth_signature.ts} (85%) create mode 100644 yarn-project/foundation/src/eth-signature/index.ts diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index fa218fb2086..c233343306e 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -1,4 +1,5 @@ -import { Body, InboxLeaf, L2Block, type ViemSignature } from '@aztec/circuit-types'; +import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types'; +import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { AppendOnlyTreeSnapshot, Header, Proof } from '@aztec/circuits.js'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index e38effdb368..26d436be536 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -8,7 +8,7 @@ import { recoverMessageAddress } from 'viem'; import { TxHash } from '../tx/tx_hash.js'; import { get0xStringHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; -import { Signature } from './signature.js'; +import { Signature } from '@aztec/foundation/eth-signature'; import { TopicType, createTopicString } from './topic_type.js'; export class BlockAttestationHash extends Buffer32 { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 8164e755117..77a6b01a2e9 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -8,7 +8,7 @@ import { recoverMessageAddress } from 'viem'; import { TxHash } from '../tx/tx_hash.js'; import { get0xStringHashedSignaturePayload, getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; -import { Signature } from './signature.js'; +import { Signature } from '@aztec/foundation/eth-signature'; import { TopicType, createTopicString } from './topic_type.js'; export class BlockProposalHash 
extends Buffer32 { diff --git a/yarn-project/circuit-types/src/p2p/index.ts b/yarn-project/circuit-types/src/p2p/index.ts index e6c268523c1..8afa13bb6c5 100644 --- a/yarn-project/circuit-types/src/p2p/index.ts +++ b/yarn-project/circuit-types/src/p2p/index.ts @@ -3,5 +3,4 @@ export * from './block_proposal.js'; export * from './interface.js'; export * from './gossipable.js'; export * from './topic_type.js'; -export * from './signature.js'; export * from './block_utils.js'; diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 2e4b9495d66..34dec1dc312 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -8,7 +8,7 @@ import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; import { get0xStringHashedSignaturePayload } from './block_utils.js'; -import { Signature } from './signature.js'; +import { Signature } from '@aztec/foundation/eth-signature'; export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise => { signer = signer || randomSigner(); diff --git a/yarn-project/circuit-types/src/p2p/signature.test.ts b/yarn-project/circuit-types/src/p2p/signature.test.ts deleted file mode 100644 index 7e443a4f105..00000000000 --- a/yarn-project/circuit-types/src/p2p/signature.test.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { recoverMessageAddress } from 'viem'; - -import { randomSigner } from './mocks.js'; -import { Signature } from './signature.js'; - -describe('Signature serialization / deserialization', () => { - it('Should serialize / deserialize', async () => { - const signer = randomSigner(); - - const originalMessage = Fr.random(); - const m = `0x${originalMessage.toBuffer().toString('hex')}`; - - const signature = await signer.signMessage({ message: m }); - - const signatureObj = Signature.from0xString(signature); - - // Serde - const serialized = signatureObj.toBuffer(); - const deserialized = Signature.fromBuffer(serialized); - expect(deserialized).toEqual(signatureObj); - - const as0x = deserialized.to0xString(); - expect(as0x).toEqual(signature); - - // Recover signature - const sender = await recoverMessageAddress({ message: originalMessage.toString(), signature: as0x }); - expect(sender).toEqual(signer.address); - }); -}); diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index f14bc84c6e1..2af937f8c18 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -17,6 +17,7 @@ "./crypto": "./dest/crypto/index.js", "./error": "./dest/error/index.js", "./eth-address": "./dest/eth-address/index.js", + "./eth-signature": "./dest/eth-signature/index.js", "./queue": "./dest/queue/index.js", "./fs": "./dest/fs/index.js", "./buffer": "./dest/buffer/index.js", @@ -144,7 +145,8 @@ "prettier": "^2.7.1", "supertest": "^6.3.3", "ts-node": "^10.9.1", - "typescript": "^5.0.4" + "typescript": "^5.0.4", + "viem": "^2.7.15" }, "files": [ "dest", diff --git a/yarn-project/foundation/src/crypto/index.ts b/yarn-project/foundation/src/crypto/index.ts index 96365958e92..4a93708f498 100644 --- a/yarn-project/foundation/src/crypto/index.ts +++ b/yarn-project/foundation/src/crypto/index.ts @@ -6,6 +6,7 @@ export * from './sha256/index.js'; export * from './sha512/index.js'; export * from './pedersen/index.js'; export * from './poseidon/index.js'; +export * from 
'./secp256k1-signer/index.js'; /** * Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts new file mode 100644 index 00000000000..b5f5f62c0df --- /dev/null +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts @@ -0,0 +1,2 @@ +export * from './secp256k1_signer.js'; +export * from './utils.js'; \ No newline at end of file diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts new file mode 100644 index 00000000000..9baf298b9ee --- /dev/null +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts @@ -0,0 +1,58 @@ + +import { generatePrivateKey, PrivateKeyAccount, privateKeyToAccount, publicKeyToAddress } from "viem/accounts"; +import { Secp256k1Signer } from "./secp256k1_signer.js"; +import {recoverPublicKey as lightRecoverPublicKey, recoverAddress as lightRecoverAddress } from "./utils.js"; +import { EthAddress } from "@aztec/foundation/eth-address"; +import { hashMessage, recoverPublicKey as viemRecoverPublicKey, recoverAddress as viemRecoverAddress } from "viem"; +import { Signature } from "@aztec/foundation/eth-signature"; +import { Buffer32 } from "@aztec/foundation/buffer"; + +/** + * Differential fuzzing implementation of viem's signer and the secp256k1 signer + */ +describe('Secp256k1Signer', () => { + let viemSigner: PrivateKeyAccount; + let lightSigner: Secp256k1Signer; + + beforeEach(() => { + const privateKey = generatePrivateKey(); + viemSigner = privateKeyToAccount(privateKey); + + lightSigner = new Secp256k1Signer(Buffer32.fromBuffer(Buffer.from(privateKey.slice(2), 'hex'))); + }); + + it('Compare implementation against viem', async () => { + const message = Buffer.from('0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', 'hex'); + // Use to compare addresses at the end + const accountAddress = viemSigner.address; + + // We use eth hashed message as viem will automatically do this with signMessage + const ethHashedMessage = hashMessage({raw: message } ); + const ethHashedMessageBuffer = Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), "hex")); + + const viemSignature = Signature.from0xString(await viemSigner.signMessage({ message: { raw: message } })); + const lightSignature = lightSigner.sign(ethHashedMessageBuffer); + + // Check signatures match + expect(viemSignature.equals(lightSignature)).toBe(true); + + + const viemPublicKey = await viemRecoverPublicKey({hash: ethHashedMessage, signature: viemSignature.to0xString()}); + const lightPublicKey = lightRecoverPublicKey(ethHashedMessageBuffer, lightSignature); + + // Check recovered public keys match + expect(Buffer.from(viemPublicKey.slice(2), "hex")).toEqual(lightPublicKey); + + // Get the eth address can be recovered from the message and signature + const viemPublicKeyToAddress = publicKeyToAddress(viemPublicKey); + const viemAddress = EthAddress.fromString(await viemRecoverAddress({hash: ethHashedMessage, signature: viemSignature.to0xString()})); + const lightAddress = lightRecoverAddress(Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), "hex")), lightSignature); + + // Check viem signer matches + expect(viemAddress.toString()).toEqual(accountAddress.toString().toLowerCase()); + 
expect(accountAddress.toString()).toEqual(viemPublicKeyToAddress.toString()); + + // Check light signer matches + expect(viemAddress.toString()).toEqual(lightAddress.toString()); + }); +}); \ No newline at end of file diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts new file mode 100644 index 00000000000..76d5a57ad54 --- /dev/null +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts @@ -0,0 +1,13 @@ +import { Buffer32 } from "@aztec/foundation/buffer"; +import { Signature } from "@aztec/foundation/eth-signature"; +import { signMessage } from "./utils.js"; + +export class Secp256k1Signer { + constructor(private privateKey: Buffer32) {} + + sign(message: Buffer32): Signature { + return signMessage(message, this.privateKey.buffer) + } +} + + diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts new file mode 100644 index 00000000000..760df60325a --- /dev/null +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -0,0 +1,52 @@ +import { secp256k1 } from "@noble/curves/secp256k1"; +import { EthAddress } from "../../eth-address/index.js"; +import { keccak256 } from "../keccak/index.js"; +import { Signature } from "../../eth-signature/eth_signature.js"; +import { Buffer32 } from "../../buffer/buffer32.js"; + +function publicKeyToAddress(publicKey: Buffer): EthAddress { + const hash = keccak256(publicKey.slice(1)); + return new EthAddress(hash.subarray(12)); +} + +export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress { + const publicKey = recoverPublicKey(hash, signature); + return publicKeyToAddress(publicKey); +} + +function toRecoveryBit(yParityOrV: number) { + if (yParityOrV === 0 || yParityOrV === 1) return yParityOrV + if (yParityOrV === 27) return 0 + if (yParityOrV === 28) return 1 + throw new Error('Invalid yParityOrV value') +} + +export function signMessage(message: Buffer32, privateKey: Buffer) { + const {r,s,recovery} = secp256k1.sign(message.buffer, privateKey) + return new Signature( + Buffer32.fromBigInt(r), + Buffer32.fromBigInt(s), + recovery ? 
28 : 27 + ); +} + +export function recoverPublicKey( + hash: Buffer32, + signature: Signature, + ): Buffer { + + const signature_ = (() => { + // typeof signature: `Signature` + const { r, s, v } = signature + const recoveryBit = toRecoveryBit(v) + return new secp256k1.Signature( + r.toBigInt(), + s.toBigInt(), + ).addRecoveryBit(recoveryBit) + })() + + const publicKey = signature_ + .recoverPublicKey(hash.buffer) + .toHex(false) + return Buffer.from(publicKey, 'hex'); + } \ No newline at end of file diff --git a/yarn-project/foundation/src/eth-signature/eth_siganture.test.ts b/yarn-project/foundation/src/eth-signature/eth_siganture.test.ts new file mode 100644 index 00000000000..732e63a00b6 --- /dev/null +++ b/yarn-project/foundation/src/eth-signature/eth_siganture.test.ts @@ -0,0 +1,35 @@ + +import { Fr } from '@aztec/foundation/fields'; + +import { Signature } from './eth_signature.js'; +import { randomBytes } from '../crypto/index.js'; +import { Secp256k1Signer, recoverAddress } from '@aztec/foundation/crypto'; +import { Buffer32 } from '@aztec/foundation/buffer'; + +const randomSigner = () => { + const pk = Buffer32.fromBuffer(randomBytes(32)); + return new Secp256k1Signer(pk); +} + +describe('Signature serialization / deserialization', () => { + it('Should serialize / deserialize', () => { + const signer = randomSigner(); + + const originalMessage = Fr.random(); + const message = Buffer32.fromField(originalMessage); + + const signature = signer.sign(message); + + console.log(signature); + + // Serde + const serialized = signature.toBuffer(); + const deserialized = Signature.fromBuffer(serialized); + expect(deserialized).toEqual(signature); + + // Recover signature + const sender = recoverAddress(message, signature); + // TODO(md): add a getAddress method to the signer + // expect(sender).toEqual(signer.getAddress()); + }); +}); diff --git a/yarn-project/circuit-types/src/p2p/signature.ts b/yarn-project/foundation/src/eth-signature/eth_signature.ts similarity index 85% rename from yarn-project/circuit-types/src/p2p/signature.ts rename to yarn-project/foundation/src/eth-signature/eth_signature.ts index 41870e59c62..f5d7eb481b1 100644 --- a/yarn-project/circuit-types/src/p2p/signature.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.ts @@ -1,5 +1,5 @@ -import { Buffer32 } from '@aztec/foundation/buffer'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { Buffer32 } from "@aztec/foundation/buffer"; +import { serializeToBuffer, BufferReader } from "@aztec/foundation/serialize"; /**Viem Signature * @@ -19,6 +19,7 @@ export type ViemSignature = { * Contains a signature split into it's primary components (r,s,v) */ export class Signature { + constructor( /** The r value of the signature */ public readonly r: Buffer32, @@ -32,8 +33,11 @@ export class Signature { static fromBuffer(buf: Buffer | BufferReader): Signature { const reader = BufferReader.asReader(buf); + const r = reader.readObject(Buffer32); + console.log("reading r ", r); const s = reader.readObject(Buffer32); + console.log("reading s", s); const v = reader.readNumber(); const isEmpty = r.isZero() && s.isZero(); @@ -41,18 +45,6 @@ export class Signature { return new Signature(r, s, v, isEmpty); } - toBuffer(): Buffer { - return serializeToBuffer([this.r, this.s, this.v]); - } - - static empty(): Signature { - return new Signature(Buffer32.ZERO, Buffer32.ZERO, 0, true); - } - - to0xString(): `0x${string}` { - return 
`0x${this.r.toString()}${this.s.toString()}${this.v.toString(16)}`; - } - /** * A seperate method exists for this as when signing locally with viem, as when * parsing from viem, we can expect the v value to be a u8, rather than our @@ -71,6 +63,23 @@ export class Signature { return new Signature(r, s, v, isEmpty); } + static empty(): Signature { + return new Signature(Buffer32.ZERO, Buffer32.ZERO, 0, true); + } + + equals(other: Signature): boolean { + return this.r.equals(other.r) && this.s.equals(other.s) && this.v === other.v && this.isEmpty === other.isEmpty; + } + + toBuffer(): Buffer { + return serializeToBuffer([this.r, this.s, this.v]); + } + + + to0xString(): `0x${string}` { + return `0x${this.r.toString()}${this.s.toString()}${this.v.toString(16)}`; + } + /** * Return the signature with `0x${string}` encodings for r and s */ diff --git a/yarn-project/foundation/src/eth-signature/index.ts b/yarn-project/foundation/src/eth-signature/index.ts new file mode 100644 index 00000000000..a0ea107137d --- /dev/null +++ b/yarn-project/foundation/src/eth-signature/index.ts @@ -0,0 +1 @@ +export * from './eth_signature.js'; \ No newline at end of file diff --git a/yarn-project/foundation/src/index.ts b/yarn-project/foundation/src/index.ts index 7a899eba520..78694f262f8 100644 --- a/yarn-project/foundation/src/index.ts +++ b/yarn-project/foundation/src/index.ts @@ -30,3 +30,4 @@ export * as worker from './worker/index.js'; export * as testing from './testing/index.js'; export * as config from './config/index.js'; export * as buffer from './buffer/index.js'; +export * as ethSignature from './eth-signature/index.js'; diff --git a/yarn-project/p2p/src/attestation_pool/mocks.ts b/yarn-project/p2p/src/attestation_pool/mocks.ts index 00194d52b96..8dcd0d6870b 100644 --- a/yarn-project/p2p/src/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/attestation_pool/mocks.ts @@ -1,4 +1,5 @@ -import { BlockAttestation, Signature, TxHash } from '@aztec/circuit-types'; +import { BlockAttestation, TxHash } from '@aztec/circuit-types'; +import { Signature } from '@aztec/foundation/eth-signature'; import { makeHeader } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; import { serializeToBuffer } from '@aztec/foundation/serialize'; diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 606efc17bc9..84e1ca45258 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -6,9 +6,11 @@ import { Tx, TxHash } from '@aztec/circuit-types'; export const PING_PROTOCOL = '/aztec/req/ping/0.1.0'; export const STATUS_PROTOCOL = '/aztec/req/status/0.1.0'; export const TX_REQ_PROTOCOL = '/aztec/req/tx/0.1.0'; +// export const ATTESTATION_REQ_PROTOCOL = '/aztec/req/attestation/0.1.0'; // Sum type for sub protocols export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | typeof TX_REQ_PROTOCOL; +// | typeof ATTESTATION_REQ_PROTOCOL; /** * A handler for a sub protocol @@ -74,6 +76,7 @@ export const DEFAULT_SUB_PROTOCOL_HANDLERS: ReqRespSubProtocolHandlers = { [PING_PROTOCOL]: defaultHandler, [STATUS_PROTOCOL]: defaultHandler, [TX_REQ_PROTOCOL]: defaultHandler, + // [ATTESTATION_REQ_PROTOCOL]: defaultHandler, }; /** @@ -127,4 +130,5 @@ export const subProtocolMap: SubProtocolMap = { request: TxHash, response: Tx, }, + // [ATTESTATION_REQ_PROTOCOL]: { }; diff --git a/yarn-project/validator-client/src/key_store/interface.ts 
b/yarn-project/validator-client/src/key_store/interface.ts index d55921b6dca..a659ff5afaa 100644 --- a/yarn-project/validator-client/src/key_store/interface.ts +++ b/yarn-project/validator-client/src/key_store/interface.ts @@ -1,4 +1,4 @@ -import { type Signature } from '@aztec/circuit-types'; +import { type Signature } from '@aztec/foundation/eth-signature'; /** Key Store * diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index 220b62fe409..5ab94b5e5f0 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -1,4 +1,4 @@ -import { Signature } from '@aztec/circuit-types'; +import { Signature } from '@aztec/foundation/eth-signature'; import { type PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index beac450610c..095f815d25e 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -677,6 +677,7 @@ __metadata: supertest: ^6.3.3 ts-node: ^10.9.1 typescript: ^5.0.4 + viem: ^2.7.15 zod: ^3.22.4 languageName: unknown linkType: soft From 65d009a83c8c063df2e5d60c6bd9cc3c94886576 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:38:18 +0000 Subject: [PATCH 037/123] chore: migrations --- .../sequencer-client/src/publisher/l1-publisher.test.ts | 4 +++- yarn-project/sequencer-client/src/publisher/l1-publisher.ts | 3 ++- yarn-project/sequencer-client/src/sequencer/sequencer.test.ts | 3 ++- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 2 +- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 0d09e221cd0..ef992a20b2a 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,5 +1,7 @@ -import { L2Block, type ViemSignature } from '@aztec/circuit-types'; +import { L2Block } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; + +import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index cfbd2a634da..2931bf7012d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,4 +1,5 @@ -import { type L2Block, type Signature, type TxHash } from '@aztec/circuit-types'; +import { type L2Block, type TxHash } from '@aztec/circuit-types'; +import { type Signature } from '@aztec/foundation/eth-signature'; import { getHashedSignaturePayload } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, type Header, type Proof } from '@aztec/circuits.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index f7bdf8c1323..9483c17b19d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ 
b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -10,7 +10,6 @@ import { PROVING_STATUS, type ProvingSuccess, type ProvingTicket, - Signature, type Tx, TxHash, type UnencryptedL2Log, @@ -18,6 +17,8 @@ import { makeProcessedTx, mockTxForRollup, } from '@aztec/circuit-types'; + +import { Signature } from '@aztec/foundation/eth-signature'; import { AztecAddress, EthAddress, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 77f1f12f7aa..8edbb7f4690 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -4,11 +4,11 @@ import { type L2Block, type L2BlockSource, type ProcessedTx, - Signature, Tx, type TxHash, type TxValidator, } from '@aztec/circuit-types'; +import { Signature } from '@aztec/foundation/eth-signature'; import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { From 69aa4a04ec62a76b611d6350787fffdaa81d4135 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:57:58 +0000 Subject: [PATCH 038/123] chore: proliferate --- .../archiver/src/archiver/eth_log_handlers.ts | 2 +- .../src/p2p/block_attestation.test.ts | 12 ++++----- .../src/p2p/block_attestation.ts | 17 +++++------- .../src/p2p/block_proposal.test.ts | 12 ++++----- .../circuit-types/src/p2p/block_proposal.ts | 20 +++++--------- .../circuit-types/src/p2p/block_utils.ts | 5 ++-- yarn-project/circuit-types/src/p2p/mocks.ts | 26 +++++++++---------- .../secp256k1-signer/secp256k1_signer.test.ts | 2 +- .../secp256k1-signer/secp256k1_signer.ts | 13 +++++++--- .../src/crypto/secp256k1-signer/utils.ts | 16 +++++++++--- ...iganture.test.ts => eth_signature.test.ts} | 6 ++--- .../p2p/src/attestation_pool/mocks.ts | 2 +- .../src/publisher/l1-publisher.test.ts | 1 - .../src/publisher/l1-publisher.ts | 2 +- .../src/sequencer/sequencer.test.ts | 3 +-- .../src/sequencer/sequencer.ts | 2 +- .../src/duties/validation_service.ts | 5 ++-- .../src/key_store/interface.ts | 3 ++- .../src/key_store/local_key_store.ts | 17 ++++++------ 19 files changed, 83 insertions(+), 83 deletions(-) rename yarn-project/foundation/src/eth-signature/{eth_siganture.test.ts => eth_signature.test.ts} (80%) diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index c233343306e..5781f1a3d0f 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -1,7 +1,7 @@ import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types'; -import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { AppendOnlyTreeSnapshot, Header, Proof } from '@aztec/circuits.js'; import { type EthAddress } from '@aztec/foundation/eth-address'; +import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { numToUInt32BE } from '@aztec/foundation/serialize'; import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.test.ts b/yarn-project/circuit-types/src/p2p/block_attestation.test.ts index 1a0ba523548..9fe3789454b 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.test.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.test.ts 
@@ -3,8 +3,8 @@ import { BlockAttestation } from './block_attestation.js'; import { makeBlockAttestation, randomSigner } from './mocks.js'; describe('Block Attestation serialization / deserialization', () => { - it('Should serialize / deserialize', async () => { - const attestation = await makeBlockAttestation(); + it('Should serialize / deserialize', () => { + const attestation = makeBlockAttestation(); const serialized = attestation.toBuffer(); const deserialized = BlockAttestation.fromBuffer(serialized); @@ -12,17 +12,17 @@ describe('Block Attestation serialization / deserialization', () => { expect(deserialized).toEqual(attestation); }); - it('Should serialize / deserialize + recover sender', async () => { + it('Should serialize / deserialize + recover sender', () => { const account = randomSigner(); - const proposal = await makeBlockAttestation(account); + const proposal = makeBlockAttestation(account); const serialized = proposal.toBuffer(); const deserialized = BlockAttestation.fromBuffer(serialized); expect(deserialized).toEqual(proposal); // Recover signature - const sender = await deserialized.getSender(); - expect(sender.toChecksumString()).toEqual(account.address); + const sender = deserialized.getSender(); + expect(sender).toEqual(account.address); }); }); diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 26d436be536..8d338b3021c 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -1,15 +1,14 @@ -import { EthAddress, Header } from '@aztec/circuits.js'; +import { type EthAddress, Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { recoverMessageAddress } from 'viem'; - import { TxHash } from '../tx/tx_hash.js'; -import { get0xStringHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; +import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; -import { Signature } from '@aztec/foundation/eth-signature'; import { TopicType, createTopicString } from './topic_type.js'; +import { recoverAddress } from '@aztec/foundation/crypto'; export class BlockAttestationHash extends Buffer32 { constructor(hash: Buffer) { @@ -56,13 +55,9 @@ export class BlockAttestation extends Gossipable { async getSender() { if (!this.sender) { // Recover the sender from the attestation - const hashed = get0xStringHashedSignaturePayload(this.archive, this.txHashes); - const address = await recoverMessageAddress({ - message: { raw: hashed }, - signature: this.signature.to0xString(), - }); + const hashed = getHashedSignaturePayload(this.archive, this.txHashes); // Cache the sender for later use - this.sender = EthAddress.fromString(address); + this.sender = recoverAddress(hashed, this.signature); } return this.sender; diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.test.ts b/yarn-project/circuit-types/src/p2p/block_proposal.test.ts index 6eb1c427f4f..d6c91f14b20 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.test.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.test.ts @@ -3,8 +3,8 @@ import { BlockProposal } from './block_proposal.js'; import { makeBlockProposal, randomSigner } from 
'./mocks.js'; describe('Block Proposal serialization / deserialization', () => { - it('Should serialize / deserialize', async () => { - const proposal = await makeBlockProposal(); + it('Should serialize / deserialize', () => { + const proposal = makeBlockProposal(); const serialized = proposal.toBuffer(); const deserialized = BlockProposal.fromBuffer(serialized); @@ -12,17 +12,17 @@ describe('Block Proposal serialization / deserialization', () => { expect(deserialized).toEqual(proposal); }); - it('Should serialize / deserialize + recover sender', async () => { + it('Should serialize / deserialize + recover sender', () => { const account = randomSigner(); - const proposal = await makeBlockProposal(account); + const proposal = makeBlockProposal(account); const serialized = proposal.toBuffer(); const deserialized = BlockProposal.fromBuffer(serialized); expect(deserialized).toEqual(proposal); // Recover signature - const sender = await deserialized.getSender(); - expect(sender.toChecksumString()).toEqual(account.address); + const sender = deserialized.getSender(); + expect(sender).toEqual(account.address); }); }); diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 77a6b01a2e9..83ac65ed27f 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -1,15 +1,14 @@ -import { EthAddress, Header } from '@aztec/circuits.js'; +import { type EthAddress, Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { recoverMessageAddress } from 'viem'; - import { TxHash } from '../tx/tx_hash.js'; import { get0xStringHashedSignaturePayload, getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; -import { Signature } from '@aztec/foundation/eth-signature'; import { TopicType, createTopicString } from './topic_type.js'; +import { recoverAddress } from '@aztec/foundation/crypto'; export class BlockProposalHash extends Buffer32 { constructor(hash: Buffer) { @@ -54,7 +53,7 @@ export class BlockProposal extends Gossipable { header: Header, archive: Fr, txs: TxHash[], - payloadSigner: (payload: Buffer) => Promise, + payloadSigner: (payload: Buffer32) => Promise, ) { const hashed = getHashedSignaturePayload(archive, txs); const sig = await payloadSigner(hashed); @@ -65,16 +64,11 @@ export class BlockProposal extends Gossipable { /**Get Sender * Lazily evaluate the sender of the proposal; result is cached */ - async getSender() { + getSender() { if (!this.sender) { - // performance note(): this signature method requires another hash behind the scenes - const hashed = get0xStringHashedSignaturePayload(this.archive, this.txs); - const address = await recoverMessageAddress({ - message: { raw: hashed }, - signature: this.signature.to0xString(), - }); + const hashed = getHashedSignaturePayload(this.archive, this.txs); // Cache the sender for later use - this.sender = EthAddress.fromString(address); + this.sender = recoverAddress(hashed, this.signature); } return this.sender; diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index cd1c5a48842..cd7b2bb8c48 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ 
b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -1,5 +1,6 @@ import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { encodeAbiParameters, keccak256 as keccak2560xString, parseAbiParameters } from 'viem'; @@ -25,8 +26,8 @@ export function getSignaturePayload(archive: Fr, txs: TxHash[]) { * @param txs - The transactions in the block * @returns The hashed payload for the signature of the block proposal */ -export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer { - return keccak256Buffer(getSignaturePayload(archive, txs)); +export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer32 { + return Buffer32.fromBuffer(keccak256Buffer(getSignaturePayload(archive, txs))); } export function get0xStringHashedSignaturePayload(archive: Fr, txs: TxHash[]): `0x${string}` { diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 34dec1dc312..6824cc37d93 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -1,41 +1,39 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; -import { type PrivateKeyAccount } from 'viem'; -import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; - import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; -import { get0xStringHashedSignaturePayload } from './block_utils.js'; -import { Signature } from '@aztec/foundation/eth-signature'; +import { getHashedSignaturePayload } from './block_utils.js'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; +import { Buffer32 } from '@aztec/foundation/buffer'; -export const makeBlockProposal = async (signer?: PrivateKeyAccount): Promise => { +export const makeBlockProposal = (signer?: Secp256k1Signer): BlockProposal => { signer = signer || randomSigner(); const blockHeader = makeHeader(1); const archive = Fr.random(); const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - const hash = get0xStringHashedSignaturePayload(archive, txs); - const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); + const hash = getHashedSignaturePayload(archive, txs); + const signature = signer.sign(hash); return new BlockProposal(blockHeader, archive, txs, signature); }; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8028) -export const makeBlockAttestation = async (signer?: PrivateKeyAccount): Promise => { +export const makeBlockAttestation = (signer?: Secp256k1Signer): BlockAttestation => { signer = signer || randomSigner(); const blockHeader = makeHeader(1); const archive = Fr.random(); const txs = [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - const hash = get0xStringHashedSignaturePayload(archive, txs); - const signature = Signature.from0xString(await signer.signMessage({ message: { raw: hash } })); + const hash = getHashedSignaturePayload(archive, txs); + const signature = signer.sign(hash); return new BlockAttestation(blockHeader, archive, txs, signature); }; -export const randomSigner = (): PrivateKeyAccount => { - const privateKey = generatePrivateKey(); - return privateKeyToAccount(privateKey); +export const randomSigner = (): Secp256k1Signer => { + const privateKey = Buffer32.random(); + return new Secp256k1Signer(privateKey); }; 
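The mock helpers above exercise the new local signing path end to end: a Secp256k1Signer wraps a 32-byte private key, signs the keccak hash of the payload as an (r, s, v) Signature, and the sender address is recovered without any async viem call. The following is a minimal illustrative sketch (not part of the patch), assuming the @aztec/foundation exports introduced in this series:

import { Buffer32 } from '@aztec/foundation/buffer';
import { Secp256k1Signer, keccak256, recoverAddress } from '@aztec/foundation/crypto';

// Build a signer from a random 32-byte private key, as randomSigner() does above.
const signer = new Secp256k1Signer(Buffer32.random());
// Sign a keccak-hashed payload; the result is an (r, s, v) Signature.
const digest = Buffer32.fromBuffer(keccak256(Buffer.from('example payload')));
const signature = signer.sign(digest);
// Recover the sender address locally and compare it against the signer's address.
const sender = recoverAddress(digest, signature);
// Expected: sender.toString() === signer.address.toString()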
diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts index 9baf298b9ee..c3b24b1b8ab 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts @@ -1,5 +1,5 @@ -import { generatePrivateKey, PrivateKeyAccount, privateKeyToAccount, publicKeyToAddress } from "viem/accounts"; +import { generatePrivateKey, type PrivateKeyAccount, privateKeyToAccount, publicKeyToAddress } from "viem/accounts"; import { Secp256k1Signer } from "./secp256k1_signer.js"; import {recoverPublicKey as lightRecoverPublicKey, recoverAddress as lightRecoverAddress } from "./utils.js"; import { EthAddress } from "@aztec/foundation/eth-address"; diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts index 76d5a57ad54..2acdae10535 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts @@ -1,9 +1,14 @@ -import { Buffer32 } from "@aztec/foundation/buffer"; -import { Signature } from "@aztec/foundation/eth-signature"; -import { signMessage } from "./utils.js"; +import { type Buffer32 } from "@aztec/foundation/buffer"; +import { type Signature } from "@aztec/foundation/eth-signature"; +import { type EthAddress } from "@aztec/foundation/eth-address"; +import { signMessage, addressFromPrivateKey } from "./utils.js"; export class Secp256k1Signer { - constructor(private privateKey: Buffer32) {} + public readonly address: EthAddress; + + constructor(private privateKey: Buffer32) { + this.address = addressFromPrivateKey(privateKey.buffer); + } sign(message: Buffer32): Signature { return signMessage(message, this.privateKey.buffer) diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts index 760df60325a..5d9c4f1c666 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -9,15 +9,25 @@ function publicKeyToAddress(publicKey: Buffer): EthAddress { return new EthAddress(hash.subarray(12)); } +export function publicKeyFromPrivateKey(privateKey: Buffer): Buffer { + return Buffer.from(secp256k1.getPublicKey(privateKey, false)); +} + +export function addressFromPrivateKey(privateKey: Buffer): EthAddress { + const publicKey = publicKeyFromPrivateKey(privateKey); + return publicKeyToAddress(publicKey); +} + + export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress { const publicKey = recoverPublicKey(hash, signature); return publicKeyToAddress(publicKey); } function toRecoveryBit(yParityOrV: number) { - if (yParityOrV === 0 || yParityOrV === 1) return yParityOrV - if (yParityOrV === 27) return 0 - if (yParityOrV === 28) return 1 + if (yParityOrV === 0 || yParityOrV === 1) {return yParityOrV} + if (yParityOrV === 27) {return 0} + if (yParityOrV === 28) {return 1} throw new Error('Invalid yParityOrV value') } diff --git a/yarn-project/foundation/src/eth-signature/eth_siganture.test.ts b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts similarity index 80% rename from yarn-project/foundation/src/eth-signature/eth_siganture.test.ts rename to yarn-project/foundation/src/eth-signature/eth_signature.test.ts index 
732e63a00b6..1dfec4a83ac 100644 --- a/yarn-project/foundation/src/eth-signature/eth_siganture.test.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts @@ -2,12 +2,11 @@ import { Fr } from '@aztec/foundation/fields'; import { Signature } from './eth_signature.js'; -import { randomBytes } from '../crypto/index.js'; import { Secp256k1Signer, recoverAddress } from '@aztec/foundation/crypto'; import { Buffer32 } from '@aztec/foundation/buffer'; const randomSigner = () => { - const pk = Buffer32.fromBuffer(randomBytes(32)); + const pk = Buffer32.random(); return new Secp256k1Signer(pk); } @@ -29,7 +28,6 @@ describe('Signature serialization / deserialization', () => { // Recover signature const sender = recoverAddress(message, signature); - // TODO(md): add a getAddress method to the signer - // expect(sender).toEqual(signer.getAddress()); + expect(sender).toEqual(signer.address); }); }); diff --git a/yarn-project/p2p/src/attestation_pool/mocks.ts b/yarn-project/p2p/src/attestation_pool/mocks.ts index 8dcd0d6870b..7b2e55a7af6 100644 --- a/yarn-project/p2p/src/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/attestation_pool/mocks.ts @@ -1,6 +1,6 @@ import { BlockAttestation, TxHash } from '@aztec/circuit-types'; -import { Signature } from '@aztec/foundation/eth-signature'; import { makeHeader } from '@aztec/circuits.js/testing'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { serializeToBuffer } from '@aztec/foundation/serialize'; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index ef992a20b2a..a3e6d007c42 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,6 +1,5 @@ import { L2Block } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; - import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 2931bf7012d..b706f17f94d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,9 +1,9 @@ import { type L2Block, type TxHash } from '@aztec/circuit-types'; -import { type Signature } from '@aztec/foundation/eth-signature'; import { getHashedSignaturePayload } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { ETHEREUM_SLOT_DURATION, EthAddress, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; +import { type Signature } from '@aztec/foundation/eth-signature'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { serializeToBuffer } from '@aztec/foundation/serialize'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 9483c17b19d..2a828225e58 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -17,8 +17,6 @@ import { makeProcessedTx, mockTxForRollup, } from 
'@aztec/circuit-types'; - -import { Signature } from '@aztec/foundation/eth-signature'; import { AztecAddress, EthAddress, @@ -30,6 +28,7 @@ import { import { Buffer32 } from '@aztec/foundation/buffer'; import { times } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; +import { Signature } from '@aztec/foundation/eth-signature'; import { type Writeable } from '@aztec/foundation/types'; import { type P2P, P2PClientState } from '@aztec/p2p'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 8edbb7f4690..3f2dea5d6c5 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -8,7 +8,6 @@ import { type TxHash, type TxValidator, } from '@aztec/circuit-types'; -import { Signature } from '@aztec/foundation/eth-signature'; import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { @@ -20,6 +19,7 @@ import { Header, StateReference, } from '@aztec/circuits.js'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 3a4ec7e8c6d..6b0291a0939 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -2,6 +2,7 @@ import { BlockAttestation, BlockProposal, type TxHash } from '@aztec/circuit-typ import { type Header } from '@aztec/circuits.js'; import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { type ValidatorKeyStore } from '../key_store/interface.js'; @@ -18,7 +19,7 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) 
*/ createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { - const payloadSigner = (payload: Buffer) => this.keyStore.sign(payload); + const payloadSigner = (payload: Buffer32) => this.keyStore.sign(payload); return BlockProposal.createProposalFromSigner(header, archive, txs, payloadSigner); } @@ -35,7 +36,7 @@ export class ValidationService { async attestToProposal(proposal: BlockProposal): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct - const buf = keccak256(proposal.getPayload()); + const buf = Buffer32.fromBuffer(keccak256(proposal.getPayload())); const sig = await this.keyStore.sign(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } diff --git a/yarn-project/validator-client/src/key_store/interface.ts b/yarn-project/validator-client/src/key_store/interface.ts index a659ff5afaa..fbacfe71904 100644 --- a/yarn-project/validator-client/src/key_store/interface.ts +++ b/yarn-project/validator-client/src/key_store/interface.ts @@ -1,9 +1,10 @@ import { type Signature } from '@aztec/foundation/eth-signature'; +import { type Buffer32 } from '@aztec/foundation/buffer'; /** Key Store * * A keystore interface that can be replaced with a local keystore / remote signer service */ export interface ValidatorKeyStore { - sign(message: Buffer): Promise; + sign(message: Buffer32): Promise; } diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index 5ab94b5e5f0..a789298e091 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -1,6 +1,6 @@ -import { Signature } from '@aztec/foundation/eth-signature'; - -import { type PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; +import { type Signature } from '@aztec/foundation/eth-signature'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { type ValidatorKeyStore } from './interface.js'; @@ -10,10 +10,10 @@ import { type ValidatorKeyStore } from './interface.js'; * An implementation of the Key store using an in memory private key. 
*/ export class LocalKeyStore implements ValidatorKeyStore { - private signer: PrivateKeyAccount; + private signer: Secp256k1Signer; constructor(privateKey: string) { - this.signer = privateKeyToAccount(privateKey as `0x{string}`); + this.signer = new Secp256k1Signer(Buffer32.fromString(privateKey)); } /** @@ -22,10 +22,9 @@ export class LocalKeyStore implements ValidatorKeyStore { * @param messageBuffer - The message buffer to sign * @return signature */ - public async sign(digestBuffer: Buffer): Promise { - const digest: `0x${string}` = `0x${digestBuffer.toString('hex')}`; - const signature = await this.signer.signMessage({ message: { raw: digest } }); + public sign(digest: Buffer32): Promise { + const signature = this.signer.sign(digest); - return Signature.from0xString(signature); + return Promise.resolve(signature); } } From d1098533721c4bb56b50d76488e2808afcbe7d42 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 12 Sep 2024 16:03:37 +0000 Subject: [PATCH 039/123] chore: remove signature lib from leonidas --- l1-contracts/src/core/sequencer_selection/Leonidas.sol | 6 +----- l1-contracts/test/sparta/Sparta.t.sol | 5 +---- yarn-project/end-to-end/src/e2e_p2p_network.test.ts | 2 +- .../sequencer-client/src/client/sequencer-client.ts | 3 ++- .../sequencer-client/src/publisher/l1-publisher.ts | 2 +- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 4 ++-- .../validator-client/src/errors/validator.error.ts | 9 ++++++++- .../validator-client/src/key_store/local_key_store.ts | 4 ++++ yarn-project/validator-client/src/validator.ts | 7 ++++++- 9 files changed, 26 insertions(+), 16 deletions(-) diff --git a/l1-contracts/src/core/sequencer_selection/Leonidas.sol b/l1-contracts/src/core/sequencer_selection/Leonidas.sol index ad9550c794f..f4f2da1c43f 100644 --- a/l1-contracts/src/core/sequencer_selection/Leonidas.sol +++ b/l1-contracts/src/core/sequencer_selection/Leonidas.sol @@ -9,7 +9,6 @@ import {Ownable} from "@oz/access/Ownable.sol"; import {SignatureLib} from "./SignatureLib.sol"; import {SampleLib} from "./SampleLib.sol"; import {Constants} from "../libraries/ConstantsGen.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {ILeonidas} from "./ILeonidas.sol"; @@ -30,7 +29,6 @@ import {ILeonidas} from "./ILeonidas.sol"; contract Leonidas is Ownable, ILeonidas { using EnumerableSet for EnumerableSet.AddressSet; using SignatureLib for SignatureLib.Signature; - using MessageHashUtils for bytes32; /** * @notice The data structure for an epoch @@ -378,8 +376,6 @@ contract Leonidas is Ownable, ILeonidas { // Validate the attestations uint256 validAttestations = 0; - bytes32 ethSignedDigest = _digest.toEthSignedMessageHash(); - for (uint256 i = 0; i < _signatures.length; i++) { SignatureLib.Signature memory signature = _signatures[i]; if (signature.isEmpty) { @@ -387,7 +383,7 @@ contract Leonidas is Ownable, ILeonidas { } // The verification will throw if invalid - signature.verify(committee[i], ethSignedDigest); + signature.verify(committee[i], _digest); validAttestations++; } diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index dc559ede666..0ed73878c47 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -7,7 +7,6 @@ import {DecoderBase} from "../decoders/Base.sol"; import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; import {Constants} from "../../src/core/libraries/ConstantsGen.sol"; import 
{SignatureLib} from "../../src/core/sequencer_selection/SignatureLib.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {Registry} from "../../src/core/messagebridge/Registry.sol"; import {Inbox} from "../../src/core/messagebridge/Inbox.sol"; @@ -26,7 +25,6 @@ import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; */ contract SpartaTest is DecoderBase { - using MessageHashUtils for bytes32; Registry internal registry; Inbox internal inbox; @@ -280,8 +278,7 @@ contract SpartaTest is DecoderBase { returns (SignatureLib.Signature memory) { uint256 privateKey = privateKeys[_signer]; - bytes32 digestForSig = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digestForSig); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); return SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}); } diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 6caf8f6bec4..851b4b91a35 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -124,7 +124,7 @@ describe('e2e_p2p_network', () => { } }); - it('should rollup txs from all peers', async () => { + it.only('should rollup txs from all peers', async () => { // create the bootstrap node for the network if (!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 02e17329981..b86dd344780 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -12,6 +12,7 @@ import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; +import { EthAddress } from '@aztec/foundation/eth-address'; /** * Encapsulates the full sequencer and publisher. @@ -100,7 +101,7 @@ export class SequencerClient { this.sequencer.restart(); } - get coinbase() { + get coinbase(): EthAddress { return this.sequencer.coinbase; } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index b706f17f94d..1655dcf6b86 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -256,7 +256,7 @@ export class L1Publisher { // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. await this.validateBlockForSubmission(block.header, { - digest, + digest: digest.toBuffer(), signatures: attestations ?? 
[], }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 3f2dea5d6c5..9e870d20b94 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -12,14 +12,14 @@ import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/cir import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { AppendOnlyTreeSnapshot, - AztecAddress, ContentCommitment, - EthAddress, GENESIS_ARCHIVE_ROOT, Header, StateReference, } from '@aztec/circuits.js'; import { Signature } from '@aztec/foundation/eth-signature'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; diff --git a/yarn-project/validator-client/src/errors/validator.error.ts b/yarn-project/validator-client/src/errors/validator.error.ts index 5cc3929962e..36ad31720be 100644 --- a/yarn-project/validator-client/src/errors/validator.error.ts +++ b/yarn-project/validator-client/src/errors/validator.error.ts @@ -2,7 +2,14 @@ import { type TxHash } from '@aztec/circuit-types/tx_hash'; export class ValidatorError extends Error { constructor(message: string) { - super(message); + super(`Validator Error: ${message}`); + } +} + +// TODO(md): add unit test +export class InvalidValidatorPrivateKeyError extends ValidatorError { + constructor() { + super('Invalid validator private key provided'); } } diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index a789298e091..ddc807e0534 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -13,6 +13,10 @@ export class LocalKeyStore implements ValidatorKeyStore { private signer: Secp256k1Signer; constructor(privateKey: string) { + // Trim 0x prefix + if (privateKey.startsWith('0x')) { + privateKey = privateKey.slice(2); + } this.signer = new Secp256k1Signer(Buffer32.fromString(privateKey)); } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 1c8442ef367..cbd1260d70f 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -7,7 +7,7 @@ import { type P2P } from '@aztec/p2p'; import { type ValidatorClientConfig } from './config.js'; import { ValidationService } from './duties/validation_service.js'; -import { AttestationTimeoutError, TransactionsNotAvailableError } from './errors/validator.error.js'; +import { AttestationTimeoutError, InvalidValidatorPrivateKeyError, TransactionsNotAvailableError } from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; @@ -42,6 +42,11 @@ export class ValidatorClient implements Validator { } static new(config: ValidatorClientConfig, p2pClient: P2P) { + if (!config.validatorPrivateKey) { + // TODO: more validation here? 
+ throw new InvalidValidatorPrivateKeyError(); + } + const localKeyStore = new LocalKeyStore(config.validatorPrivateKey); const validator = new ValidatorClient( From d3cf4b83056f6fde2d007b456cc438069a77b2ec Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 12 Sep 2024 16:05:02 +0000 Subject: [PATCH 040/123] fmt --- l1-contracts/test/sparta/Sparta.t.sol | 1 - yarn-project/end-to-end/src/e2e_p2p_network.test.ts | 2 +- yarn-project/sequencer-client/src/client/sequencer-client.ts | 2 +- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 4 ++-- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 0ed73878c47..fff0a2828d5 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -25,7 +25,6 @@ import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; */ contract SpartaTest is DecoderBase { - Registry internal registry; Inbox internal inbox; Outbox internal outbox; diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 851b4b91a35..6caf8f6bec4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -124,7 +124,7 @@ describe('e2e_p2p_network', () => { } }); - it.only('should rollup txs from all peers', async () => { + it('should rollup txs from all peers', async () => { // create the bootstrap node for the network if (!bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index b86dd344780..9bd57cea4a8 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,4 +1,5 @@ import { type L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-types'; +import { type EthAddress } from '@aztec/foundation/eth-address'; import { type P2P } from '@aztec/p2p'; import { PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; @@ -12,7 +13,6 @@ import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; -import { EthAddress } from '@aztec/foundation/eth-address'; /** * Encapsulates the full sequencer and publisher. 
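With the MessageHashUtils prefix dropped in the Leonidas change above, both the TypeScript keystore and the contract temporarily sign and verify over the raw 32-byte digest. As an illustrative aside rather than code from the patch, a minimal raw-digest sign/recover flow can be sketched with @noble/curves (the same library the utils.ts added later in this series imports) plus @noble/hashes for keccak256; the helper functions below are stand-ins for illustration, not the repo's own API:

import { secp256k1 } from '@noble/curves/secp256k1';
import { keccak_256 } from '@noble/hashes/sha3';

// Ethereum address = last 20 bytes of keccak256(uncompressed pubkey without the 0x04 prefix byte).
function addressFromPrivateKey(privateKey: Uint8Array): Uint8Array {
  const publicKey = secp256k1.getPublicKey(privateKey, false);
  return keccak_256(publicKey.subarray(1)).subarray(12);
}

// Sign a 32-byte digest, returning (r, s, v) with v in {27, 28}, the convention the
// Signature class in this series serializes.
function signDigest(digest: Uint8Array, privateKey: Uint8Array) {
  const { r, s, recovery } = secp256k1.sign(digest, privateKey);
  return { r, s, v: recovery ? 28 : 27 };
}

// Recover the signer's address from the digest and an (r, s, v) signature.
function recoverSigner(digest: Uint8Array, sig: { r: bigint; s: bigint; v: number }): Uint8Array {
  const parsed = new secp256k1.Signature(sig.r, sig.s).addRecoveryBit(sig.v - 27);
  const publicKey = parsed.recoverPublicKey(digest).toRawBytes(false);
  return keccak_256(publicKey.subarray(1)).subarray(12);
}

Round-tripping a random key through signDigest and recoverSigner should reproduce addressFromPrivateKey's output, which is essentially the cross-check that the differential test against viem, added later in this series, performs.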
diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 9e870d20b94..b8d159c18f8 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -17,9 +17,9 @@ import { Header, StateReference, } from '@aztec/circuits.js'; -import { Signature } from '@aztec/foundation/eth-signature'; -import { EthAddress } from '@aztec/foundation/eth-address'; import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; From d7620969ff68f5e5ed4f3a5e70bab63bdfb069ff Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:01:16 +0000 Subject: [PATCH 041/123] fmt + fix test --- .../src/p2p/block_attestation.ts | 2 +- .../circuit-types/src/p2p/block_proposal.ts | 2 +- .../circuit-types/src/p2p/block_utils.ts | 6 +- .../src/crypto/secp256k1-signer/index.ts | 2 +- .../secp256k1-signer/secp256k1_signer.test.ts | 109 +++++++++--------- .../secp256k1-signer/secp256k1_signer.ts | 13 +-- .../src/crypto/secp256k1-signer/utils.ts | 75 ++++++------ .../src/eth-signature/eth_signature.test.ts | 13 +-- .../src/eth-signature/eth_signature.ts | 8 +- .../foundation/src/eth-signature/index.ts | 2 +- yarn-project/foundation/src/fields/point.ts | 3 +- 11 files changed, 111 insertions(+), 124 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 8d338b3021c..4a22ae9a0f8 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -52,7 +52,7 @@ export class BlockAttestation extends Gossipable { * Lazily evaluate and cache the sender of the attestation * @returns The sender of the attestation */ - async getSender() { + getSender() { if (!this.sender) { // Recover the sender from the attestation const hashed = getHashedSignaturePayload(this.archive, this.txHashes); diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 83ac65ed27f..21f5a9ad68b 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -5,7 +5,7 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { TxHash } from '../tx/tx_hash.js'; -import { get0xStringHashedSignaturePayload, getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; +import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { TopicType, createTopicString } from './topic_type.js'; import { recoverAddress } from '@aztec/foundation/crypto'; diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index cd7b2bb8c48..31fd0fc5de7 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -2,7 +2,7 @@ import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; import { type Fr } from 
'@aztec/foundation/fields'; import { Buffer32 } from '@aztec/foundation/buffer'; -import { encodeAbiParameters, keccak256 as keccak2560xString, parseAbiParameters } from 'viem'; +import { encodeAbiParameters, parseAbiParameters } from 'viem'; import { type TxHash } from '../tx/tx_hash.js'; @@ -29,7 +29,3 @@ export function getSignaturePayload(archive: Fr, txs: TxHash[]) { export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer32 { return Buffer32.fromBuffer(keccak256Buffer(getSignaturePayload(archive, txs))); } - -export function get0xStringHashedSignaturePayload(archive: Fr, txs: TxHash[]): `0x${string}` { - return keccak2560xString(getSignaturePayload(archive, txs)); -} diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts index b5f5f62c0df..b64273685d2 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/index.ts @@ -1,2 +1,2 @@ export * from './secp256k1_signer.js'; -export * from './utils.js'; \ No newline at end of file +export * from './utils.js'; diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts index c3b24b1b8ab..707227208f2 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.test.ts @@ -1,58 +1,63 @@ +import { Buffer32 } from '@aztec/foundation/buffer'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { Signature } from '@aztec/foundation/eth-signature'; -import { generatePrivateKey, type PrivateKeyAccount, privateKeyToAccount, publicKeyToAddress } from "viem/accounts"; -import { Secp256k1Signer } from "./secp256k1_signer.js"; -import {recoverPublicKey as lightRecoverPublicKey, recoverAddress as lightRecoverAddress } from "./utils.js"; -import { EthAddress } from "@aztec/foundation/eth-address"; -import { hashMessage, recoverPublicKey as viemRecoverPublicKey, recoverAddress as viemRecoverAddress } from "viem"; -import { Signature } from "@aztec/foundation/eth-signature"; -import { Buffer32 } from "@aztec/foundation/buffer"; +import { hashMessage, recoverAddress as viemRecoverAddress, recoverPublicKey as viemRecoverPublicKey } from 'viem'; +import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount, publicKeyToAddress } from 'viem/accounts'; + +import { Secp256k1Signer } from './secp256k1_signer.js'; +import { recoverAddress as lightRecoverAddress, recoverPublicKey as lightRecoverPublicKey } from './utils.js'; /** * Differential fuzzing implementation of viem's signer and the secp256k1 signer */ describe('Secp256k1Signer', () => { - let viemSigner: PrivateKeyAccount; - let lightSigner: Secp256k1Signer; - - beforeEach(() => { - const privateKey = generatePrivateKey(); - viemSigner = privateKeyToAccount(privateKey); - - lightSigner = new Secp256k1Signer(Buffer32.fromBuffer(Buffer.from(privateKey.slice(2), 'hex'))); - }); - - it('Compare implementation against viem', async () => { - const message = Buffer.from('0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', 'hex'); - // Use to compare addresses at the end - const accountAddress = viemSigner.address; - - // We use eth hashed message as viem will automatically do this with signMessage - const ethHashedMessage = hashMessage({raw: message } ); - const ethHashedMessageBuffer 
= Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), "hex")); - - const viemSignature = Signature.from0xString(await viemSigner.signMessage({ message: { raw: message } })); - const lightSignature = lightSigner.sign(ethHashedMessageBuffer); - - // Check signatures match - expect(viemSignature.equals(lightSignature)).toBe(true); - - - const viemPublicKey = await viemRecoverPublicKey({hash: ethHashedMessage, signature: viemSignature.to0xString()}); - const lightPublicKey = lightRecoverPublicKey(ethHashedMessageBuffer, lightSignature); - - // Check recovered public keys match - expect(Buffer.from(viemPublicKey.slice(2), "hex")).toEqual(lightPublicKey); - - // Get the eth address can be recovered from the message and signature - const viemPublicKeyToAddress = publicKeyToAddress(viemPublicKey); - const viemAddress = EthAddress.fromString(await viemRecoverAddress({hash: ethHashedMessage, signature: viemSignature.to0xString()})); - const lightAddress = lightRecoverAddress(Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), "hex")), lightSignature); - - // Check viem signer matches - expect(viemAddress.toString()).toEqual(accountAddress.toString().toLowerCase()); - expect(accountAddress.toString()).toEqual(viemPublicKeyToAddress.toString()); - - // Check light signer matches - expect(viemAddress.toString()).toEqual(lightAddress.toString()); - }); -}); \ No newline at end of file + let viemSigner: PrivateKeyAccount; + let lightSigner: Secp256k1Signer; + + beforeEach(() => { + const privateKey = generatePrivateKey(); + viemSigner = privateKeyToAccount(privateKey); + + lightSigner = new Secp256k1Signer(Buffer32.fromBuffer(Buffer.from(privateKey.slice(2), 'hex'))); + }); + + it('Compare implementation against viem', async () => { + const message = Buffer.from('0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', 'hex'); + // Use to compare addresses at the end + const accountAddress = viemSigner.address; + + // We use eth hashed message as viem will automatically do this with signMessage + const ethHashedMessage = hashMessage({ raw: message }); + const ethHashedMessageBuffer = Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), 'hex')); + + const viemSignature = Signature.from0xString(await viemSigner.signMessage({ message: { raw: message } })); + const lightSignature = lightSigner.sign(ethHashedMessageBuffer); + + // Check signatures match + expect(viemSignature.equals(lightSignature)).toBe(true); + + const viemPublicKey = await viemRecoverPublicKey({ hash: ethHashedMessage, signature: viemSignature.to0xString() }); + const lightPublicKey = lightRecoverPublicKey(ethHashedMessageBuffer, lightSignature); + + // Check recovered public keys match + expect(Buffer.from(viemPublicKey.slice(2), 'hex')).toEqual(lightPublicKey); + + // Get the eth address can be recovered from the message and signature + const viemPublicKeyToAddress = publicKeyToAddress(viemPublicKey); + const viemAddress = EthAddress.fromString( + await viemRecoverAddress({ hash: ethHashedMessage, signature: viemSignature.to0xString() }), + ); + const lightAddress = lightRecoverAddress( + Buffer32.fromBuffer(Buffer.from(ethHashedMessage.slice(2), 'hex')), + lightSignature, + ); + + // Check viem signer matches + expect(viemAddress.toString()).toEqual(accountAddress.toString().toLowerCase()); + expect(accountAddress.toString()).toEqual(viemPublicKeyToAddress.toString()); + + // Check light signer matches + expect(viemAddress.toString()).toEqual(lightAddress.toString()); + }); +}); diff --git 
a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts index 2acdae10535..a062c1a32c9 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts @@ -1,7 +1,8 @@ -import { type Buffer32 } from "@aztec/foundation/buffer"; -import { type Signature } from "@aztec/foundation/eth-signature"; -import { type EthAddress } from "@aztec/foundation/eth-address"; -import { signMessage, addressFromPrivateKey } from "./utils.js"; +import { type Buffer32 } from '@aztec/foundation/buffer'; +import { type EthAddress } from '@aztec/foundation/eth-address'; +import { type Signature } from '@aztec/foundation/eth-signature'; + +import { addressFromPrivateKey, signMessage } from './utils.js'; export class Secp256k1Signer { public readonly address: EthAddress; @@ -11,8 +12,6 @@ export class Secp256k1Signer { } sign(message: Buffer32): Signature { - return signMessage(message, this.privateKey.buffer) + return signMessage(message, this.privateKey.buffer); } } - - diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts index 5d9c4f1c666..2757b559ec6 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -1,8 +1,9 @@ -import { secp256k1 } from "@noble/curves/secp256k1"; -import { EthAddress } from "../../eth-address/index.js"; -import { keccak256 } from "../keccak/index.js"; -import { Signature } from "../../eth-signature/eth_signature.js"; -import { Buffer32 } from "../../buffer/buffer32.js"; +import { secp256k1 } from '@noble/curves/secp256k1'; + +import { Buffer32 } from '../../buffer/buffer32.js'; +import { EthAddress } from '../../eth-address/index.js'; +import { Signature } from '../../eth-signature/eth_signature.js'; +import { keccak256 } from '../keccak/index.js'; function publicKeyToAddress(publicKey: Buffer): EthAddress { const hash = keccak256(publicKey.slice(1)); @@ -10,53 +11,45 @@ function publicKeyToAddress(publicKey: Buffer): EthAddress { } export function publicKeyFromPrivateKey(privateKey: Buffer): Buffer { - return Buffer.from(secp256k1.getPublicKey(privateKey, false)); + return Buffer.from(secp256k1.getPublicKey(privateKey, false)); } export function addressFromPrivateKey(privateKey: Buffer): EthAddress { - const publicKey = publicKeyFromPrivateKey(privateKey); - return publicKeyToAddress(publicKey); + const publicKey = publicKeyFromPrivateKey(privateKey); + return publicKeyToAddress(publicKey); } - export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress { - const publicKey = recoverPublicKey(hash, signature); - return publicKeyToAddress(publicKey); + const publicKey = recoverPublicKey(hash, signature); + return publicKeyToAddress(publicKey); } function toRecoveryBit(yParityOrV: number) { - if (yParityOrV === 0 || yParityOrV === 1) {return yParityOrV} - if (yParityOrV === 27) {return 0} - if (yParityOrV === 28) {return 1} - throw new Error('Invalid yParityOrV value') + if (yParityOrV === 0 || yParityOrV === 1) { + return yParityOrV; + } + if (yParityOrV === 27) { + return 0; + } + if (yParityOrV === 28) { + return 1; + } + throw new Error('Invalid yParityOrV value'); } export function signMessage(message: Buffer32, privateKey: Buffer) { - const {r,s,recovery} = secp256k1.sign(message.buffer, privateKey) - return 
new Signature( - Buffer32.fromBigInt(r), - Buffer32.fromBigInt(s), - recovery ? 28 : 27 - ); + const { r, s, recovery } = secp256k1.sign(message.buffer, privateKey); + return new Signature(Buffer32.fromBigInt(r), Buffer32.fromBigInt(s), recovery ? 28 : 27); } -export function recoverPublicKey( - hash: Buffer32, - signature: Signature, - ): Buffer { - - const signature_ = (() => { - // typeof signature: `Signature` - const { r, s, v } = signature - const recoveryBit = toRecoveryBit(v) - return new secp256k1.Signature( - r.toBigInt(), - s.toBigInt(), - ).addRecoveryBit(recoveryBit) - })() - - const publicKey = signature_ - .recoverPublicKey(hash.buffer) - .toHex(false) - return Buffer.from(publicKey, 'hex'); - } \ No newline at end of file +export function recoverPublicKey(hash: Buffer32, signature: Signature): Buffer { + const signature_ = (() => { + // typeof signature: `Signature` + const { r, s, v } = signature; + const recoveryBit = toRecoveryBit(v); + return new secp256k1.Signature(r.toBigInt(), s.toBigInt()).addRecoveryBit(recoveryBit); + })(); + + const publicKey = signature_.recoverPublicKey(hash.buffer).toHex(false); + return Buffer.from(publicKey, 'hex'); +} diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts index 1dfec4a83ac..d8fc489a675 100644 --- a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts @@ -1,14 +1,13 @@ - +import { Buffer32 } from '@aztec/foundation/buffer'; +import { Secp256k1Signer, recoverAddress } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { Signature } from './eth_signature.js'; -import { Secp256k1Signer, recoverAddress } from '@aztec/foundation/crypto'; -import { Buffer32 } from '@aztec/foundation/buffer'; const randomSigner = () => { - const pk = Buffer32.random(); - return new Secp256k1Signer(pk); -} + const pk = Buffer32.random(); + return new Secp256k1Signer(pk); +}; describe('Signature serialization / deserialization', () => { it('Should serialize / deserialize', () => { @@ -19,8 +18,6 @@ describe('Signature serialization / deserialization', () => { const signature = signer.sign(message); - console.log(signature); - // Serde const serialized = signature.toBuffer(); const deserialized = Signature.fromBuffer(serialized); diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.ts b/yarn-project/foundation/src/eth-signature/eth_signature.ts index f5d7eb481b1..2808ea63ce3 100644 --- a/yarn-project/foundation/src/eth-signature/eth_signature.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.ts @@ -1,5 +1,5 @@ -import { Buffer32 } from "@aztec/foundation/buffer"; -import { serializeToBuffer, BufferReader } from "@aztec/foundation/serialize"; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; /**Viem Signature * @@ -19,7 +19,6 @@ export type ViemSignature = { * Contains a signature split into it's primary components (r,s,v) */ export class Signature { - constructor( /** The r value of the signature */ public readonly r: Buffer32, @@ -35,9 +34,7 @@ export class Signature { const reader = BufferReader.asReader(buf); const r = reader.readObject(Buffer32); - console.log("reading r ", r); const s = reader.readObject(Buffer32); - console.log("reading s", s); const v = reader.readNumber(); const isEmpty = r.isZero() && s.isZero(); @@ -75,7 +72,6 
@@ export class Signature { return serializeToBuffer([this.r, this.s, this.v]); } - to0xString(): `0x${string}` { return `0x${this.r.toString()}${this.s.toString()}${this.v.toString(16)}`; } diff --git a/yarn-project/foundation/src/eth-signature/index.ts b/yarn-project/foundation/src/eth-signature/index.ts index a0ea107137d..73bc59dbe1a 100644 --- a/yarn-project/foundation/src/eth-signature/index.ts +++ b/yarn-project/foundation/src/eth-signature/index.ts @@ -1 +1 @@ -export * from './eth_signature.js'; \ No newline at end of file +export * from './eth_signature.js'; diff --git a/yarn-project/foundation/src/fields/point.ts b/yarn-project/foundation/src/fields/point.ts index 458252bd87a..3f3620f9348 100644 --- a/yarn-project/foundation/src/fields/point.ts +++ b/yarn-project/foundation/src/fields/point.ts @@ -1,5 +1,6 @@ import { toBigIntBE } from '../bigint-buffer/index.js'; -import { poseidon2Hash, randomBoolean } from '../crypto/index.js'; +import { poseidon2Hash } from '../crypto/poseidon/index.js'; +import { randomBoolean } from '../crypto/random/index.js'; import { BufferReader, FieldReader, serializeToBuffer } from '../serialize/index.js'; import { Fr } from './fields.js'; From afb46ca0d27b5ea85da3a8e3c60f8b61aa578aae Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:03:47 +0000 Subject: [PATCH 042/123] fmt --- yarn-project/circuit-types/src/p2p/block_attestation.ts | 4 ++-- yarn-project/circuit-types/src/p2p/block_proposal.ts | 2 +- yarn-project/circuit-types/src/p2p/block_utils.ts | 2 +- yarn-project/circuit-types/src/p2p/mocks.ts | 4 ++-- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 6 +++--- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 4a22ae9a0f8..16581b593b2 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -1,14 +1,14 @@ import { type EthAddress, Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; +import { recoverAddress } from '@aztec/foundation/crypto'; import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { TxHash } from '../tx/tx_hash.js'; -import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; +import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { TopicType, createTopicString } from './topic_type.js'; -import { recoverAddress } from '@aztec/foundation/crypto'; export class BlockAttestationHash extends Buffer32 { constructor(hash: Buffer) { diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 21f5a9ad68b..a480296b9c6 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -1,5 +1,6 @@ import { type EthAddress, Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; +import { recoverAddress } from '@aztec/foundation/crypto'; import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -8,7 +9,6 @@ 
import { TxHash } from '../tx/tx_hash.js'; import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { TopicType, createTopicString } from './topic_type.js'; -import { recoverAddress } from '@aztec/foundation/crypto'; export class BlockProposalHash extends Buffer32 { constructor(hash: Buffer) { diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index 31fd0fc5de7..40c9193ab9c 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -1,6 +1,6 @@ +import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; -import { Buffer32 } from '@aztec/foundation/buffer'; import { encodeAbiParameters, parseAbiParameters } from 'viem'; diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 6824cc37d93..495cf33be03 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -1,12 +1,12 @@ import { makeHeader } from '@aztec/circuits.js/testing'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { TxHash } from '../tx/tx_hash.js'; import { BlockAttestation } from './block_attestation.js'; import { BlockProposal } from './block_proposal.js'; import { getHashedSignaturePayload } from './block_utils.js'; -import { Secp256k1Signer } from '@aztec/foundation/crypto'; -import { Buffer32 } from '@aztec/foundation/buffer'; export const makeBlockProposal = (signer?: Secp256k1Signer): BlockProposal => { signer = signer || randomSigner(); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index b8d159c18f8..3b4d8cce56b 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -536,7 +536,7 @@ export class Sequencer { this.log.verbose(`Collected attestations from validators, number of attestations: ${attestations.length}`); // note: the smart contract requires that the signatures are provided in the order of the committee - return await orderAttestations(attestations, committee); + return orderAttestations(attestations, committee); } /** @@ -665,12 +665,12 @@ export enum SequencerState { * * @todo: perform this logic within the memory attestation store instead? 
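An aside on the ordering requirement noted in the hunk above: Leonidas checks signatures index-by-index against the committee and skips entries flagged isEmpty, so the sequencer has to hand them over in committee order, with gaps for members that never attested. A simplified, self-contained sketch of that reordering, using plain strings and a hypothetical AttestationLike stand-in rather than the real BlockAttestation and Signature classes, might look like:

// Hypothetical stand-ins for the real types used by the sequencer.
interface AttestationLike {
  sender: string; // Ethereum address of the attester, as a hex string
  signature: Uint8Array; // 65-byte (r, s, v) signature
}

// One slot per committee member, in committee order; members that did not attest
// map to undefined (the production code presumably substitutes an empty signature there,
// which the contract tolerates via its isEmpty check).
function orderByCommittee(
  attestations: AttestationLike[],
  committee: string[],
): Array<Uint8Array | undefined> {
  const bySender = new Map(attestations.map(a => [a.sender.toLowerCase(), a.signature] as const));
  return committee.map(member => bySender.get(member.toLowerCase()));
}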
*/ -async function orderAttestations(attestations: BlockAttestation[], orderAddresses: EthAddress[]): Promise { +function orderAttestations(attestations: BlockAttestation[], orderAddresses: EthAddress[]): Signature[] { // Create a map of sender addresses to BlockAttestations const attestationMap = new Map(); for (const attestation of attestations) { - const sender = await attestation.getSender(); + const sender = attestation.getSender(); if (sender) { attestationMap.set(sender.toString(), attestation); } From 37929f5a4104ae3bb4236f441d164e7e70748c58 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:13:07 +0000 Subject: [PATCH 043/123] document secp256 helpers --- .../foundation/src/buffer/buffer32.ts | 18 +++++++ .../secp256k1-signer/secp256k1_signer.ts | 7 +++ .../src/crypto/secp256k1-signer/utils.ts | 54 +++++++++++++++---- .../src/key_store/local_key_store.ts | 4 -- 4 files changed, 70 insertions(+), 13 deletions(-) diff --git a/yarn-project/foundation/src/buffer/buffer32.ts b/yarn-project/foundation/src/buffer/buffer32.ts index 736c0ada457..a3a1d6b6702 100644 --- a/yarn-project/foundation/src/buffer/buffer32.ts +++ b/yarn-project/foundation/src/buffer/buffer32.ts @@ -112,7 +112,25 @@ export class Buffer32 { * @param str - The TX hash in string format. * @returns A new Buffer32 object. */ + public static fromStringUnchecked(str: string): Buffer32 { + return new Buffer32(Buffer.from(str, 'hex')); + } + + /** + * Converts a string into a Buffer32 object. + * NOTE: this method includes checks for the 0x prefix and the length of the string. + * if you dont need this checks, use fromStringUnchecked instead. + * + * @param str - The TX hash in string format. + * @returns A new Buffer32 object. + */ public static fromString(str: string): Buffer32 { + if (str.startsWith('0x')) { + str = str.slice(2); + } + if (str.length !== 64) { + throw new Error(`Expected string to be 64 characters long, but was ${str.length}`); + } return new Buffer32(Buffer.from(str, 'hex')); } diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts index a062c1a32c9..f3bc42699cb 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts @@ -4,6 +4,13 @@ import { type Signature } from '@aztec/foundation/eth-signature'; import { addressFromPrivateKey, signMessage } from './utils.js'; +/** + * Secp256k1Signer + * + * A class for signing messages using a secp256k1 private key. + * - This is a slim drop in replacement for an Ethereum signer, so it can be used in the same way. + * - See `utils.ts` for functions that enable recovering addresses and public keys from signatures. + */ export class Secp256k1Signer { public readonly address: EthAddress; diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts index 2757b559ec6..13a1662be48 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -5,25 +5,52 @@ import { EthAddress } from '../../eth-address/index.js'; import { Signature } from '../../eth-signature/eth_signature.js'; import { keccak256 } from '../keccak/index.js'; +/** + * Converts a public key to an address. + * @param publicKey - The public key to convert. + * @returns The address. 
+ */ function publicKeyToAddress(publicKey: Buffer): EthAddress { - const hash = keccak256(publicKey.slice(1)); + const hash = keccak256(publicKey.subarray(1)); return new EthAddress(hash.subarray(12)); } +/** + * Converts a private key to a public key. + * @param privateKey - The private key to convert. + * @returns The public key. + */ export function publicKeyFromPrivateKey(privateKey: Buffer): Buffer { return Buffer.from(secp256k1.getPublicKey(privateKey, false)); } +/** + * Converts a private key to an address. + * @param privateKey - The private key to convert. + * @returns The address. + */ export function addressFromPrivateKey(privateKey: Buffer): EthAddress { const publicKey = publicKeyFromPrivateKey(privateKey); return publicKeyToAddress(publicKey); } +/** + * Recovers an address from a hash and a signature. + * @param hash - The hash to recover the address from. + * @param signature - The signature to recover the address from. + * @returns The address. + */ export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress { const publicKey = recoverPublicKey(hash, signature); return publicKeyToAddress(publicKey); } +/** + * @attribution - viem + * Converts a yParityOrV value to a recovery bit. + * @param yParityOrV - The yParityOrV value to convert. + * @returns The recovery bit. + */ function toRecoveryBit(yParityOrV: number) { if (yParityOrV === 0 || yParityOrV === 1) { return yParityOrV; @@ -37,19 +64,28 @@ function toRecoveryBit(yParityOrV: number) { throw new Error('Invalid yParityOrV value'); } +/** + * Signs a message using ecdsa over the secp256k1 curve. + * @param message - The message to sign. + * @param privateKey - The private key to sign the message with. + * @returns The signature. + */ export function signMessage(message: Buffer32, privateKey: Buffer) { const { r, s, recovery } = secp256k1.sign(message.buffer, privateKey); return new Signature(Buffer32.fromBigInt(r), Buffer32.fromBigInt(s), recovery ? 28 : 27); } +/** + * Recovers a public key from a hash and a signature. + * @param hash - The hash to recover the public key from. + * @param signature - The signature to recover the public key from. + * @returns The public key. 
+ */ export function recoverPublicKey(hash: Buffer32, signature: Signature): Buffer { - const signature_ = (() => { - // typeof signature: `Signature` - const { r, s, v } = signature; - const recoveryBit = toRecoveryBit(v); - return new secp256k1.Signature(r.toBigInt(), s.toBigInt()).addRecoveryBit(recoveryBit); - })(); - - const publicKey = signature_.recoverPublicKey(hash.buffer).toHex(false); + const { r, s, v } = signature; + const recoveryBit = toRecoveryBit(v); + const sig = new secp256k1.Signature(r.toBigInt(), s.toBigInt()).addRecoveryBit(recoveryBit); + + const publicKey = sig.recoverPublicKey(hash.buffer).toHex(false); return Buffer.from(publicKey, 'hex'); } diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index ddc807e0534..a789298e091 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -13,10 +13,6 @@ export class LocalKeyStore implements ValidatorKeyStore { private signer: Secp256k1Signer; constructor(privateKey: string) { - // Trim 0x prefix - if (privateKey.startsWith('0x')) { - privateKey = privateKey.slice(2); - } this.signer = new Secp256k1Signer(Buffer32.fromString(privateKey)); } From 1cb668763a75f1ca4cb1d59bb1256baf6e30c17f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:15:27 +0000 Subject: [PATCH 044/123] clean --- yarn-project/circuit-types/src/p2p/block_utils.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index 40c9193ab9c..79e8cc685a7 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { keccak256 as keccak256Buffer } from '@aztec/foundation/crypto'; +import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; import { encodeAbiParameters, parseAbiParameters } from 'viem'; @@ -27,5 +27,5 @@ export function getSignaturePayload(archive: Fr, txs: TxHash[]) { * @returns The hashed payload for the signature of the block proposal */ export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer32 { - return Buffer32.fromBuffer(keccak256Buffer(getSignaturePayload(archive, txs))); + return Buffer32.fromBuffer(keccak256(getSignaturePayload(archive, txs))); } From 62b7c720d3d9a69c4afadbd543092ce9fcf0d552 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:20:40 +0000 Subject: [PATCH 045/123] clean --- .../attestation_pool/memory_attestation_pool.ts | 7 ++++--- yarn-project/p2p/src/service/reqresp/interface.ts | 6 +----- .../validator-client/src/validator.test.ts | 14 +++++++------- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts b/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts index 524a399aece..5d0d2002425 100644 --- a/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts +++ b/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts @@ -19,18 +19,19 @@ export class InMemoryAttestationPool implements AttestationPool { } } - public async addAttestations(attestations: BlockAttestation[]): Promise { + public 
addAttestations(attestations: BlockAttestation[]): Promise { for (const attestation of attestations) { // Perf: order and group by slot before insertion const slotNumber = attestation.header.globalVariables.slotNumber; - const address = await attestation.getSender(); + const address = attestation.getSender(); const slotAttestationMap = getSlotOrDefault(this.attestations, slotNumber.toBigInt()); slotAttestationMap.set(address.toString(), attestation); this.log.verbose(`Added attestation for slot ${slotNumber} from ${address}`); } + return Promise.resolve(); } public deleteAttestationsForSlot(slot: bigint): Promise { @@ -45,7 +46,7 @@ export class InMemoryAttestationPool implements AttestationPool { const slotNumber = attestation.header.globalVariables.slotNumber; const slotAttestationMap = this.attestations.get(slotNumber.toBigInt()); if (slotAttestationMap) { - const address = await attestation.getSender(); + const address = attestation.getSender(); slotAttestationMap.delete(address.toString()); this.log.verbose(`Deleted attestation for slot ${slotNumber} from ${address}`); } diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 84e1ca45258..023f374e671 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -6,11 +6,9 @@ import { Tx, TxHash } from '@aztec/circuit-types'; export const PING_PROTOCOL = '/aztec/req/ping/0.1.0'; export const STATUS_PROTOCOL = '/aztec/req/status/0.1.0'; export const TX_REQ_PROTOCOL = '/aztec/req/tx/0.1.0'; -// export const ATTESTATION_REQ_PROTOCOL = '/aztec/req/attestation/0.1.0'; // Sum type for sub protocols export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | typeof TX_REQ_PROTOCOL; -// | typeof ATTESTATION_REQ_PROTOCOL; /** * A handler for a sub protocol @@ -76,7 +74,6 @@ export const DEFAULT_SUB_PROTOCOL_HANDLERS: ReqRespSubProtocolHandlers = { [PING_PROTOCOL]: defaultHandler, [STATUS_PROTOCOL]: defaultHandler, [TX_REQ_PROTOCOL]: defaultHandler, - // [ATTESTATION_REQ_PROTOCOL]: defaultHandler, }; /** @@ -129,6 +126,5 @@ export const subProtocolMap: SubProtocolMap = { [TX_REQ_PROTOCOL]: { request: TxHash, response: Tx, - }, - // [ATTESTATION_REQ_PROTOCOL]: { + } }; diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 4802656e5ce..fe5406287d0 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -46,24 +46,24 @@ describe('ValidationService', () => { expect(blockProposal).toBeDefined(); const validatorAddress = EthAddress.fromString(validatorAccount.address); - expect(await blockProposal.getSender()).toEqual(validatorAddress); + expect(blockProposal.getSender()).toEqual(validatorAddress); }); - it('Should a timeout if we do not collect enough attestations in time', async () => { - const proposal = await makeBlockProposal(); + it('Should a timeout if we do not collect enough attestations in time', () => { + const proposal = makeBlockProposal(); - await expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError); + expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError); }); - it('Should throw an error if the transactions are not available', async () => { - const proposal = await makeBlockProposal(); + it('Should throw an error if the transactions are not available', () => { + const 
proposal = makeBlockProposal(); // mock the p2pClient.getTxStatus to return undefined for all transactions p2pClient.getTxStatus.mockImplementation(() => undefined); // Mock the p2pClient.requestTxs to return undefined for all transactions p2pClient.requestTxs.mockImplementation(() => Promise.resolve([undefined])); - await expect(validatorClient.ensureTransactionsAreAvailable(proposal)).rejects.toThrow( + expect(validatorClient.ensureTransactionsAreAvailable(proposal)).rejects.toThrow( TransactionsNotAvailableError, ); }); From 421470e9ff613f6d9ddcf7aab163094bddc78adc Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 11:10:29 +0000 Subject: [PATCH 046/123] fix: validator pk validation --- .../src/duties/validation_service.ts | 2 +- .../src/errors/validator.error.ts | 1 - .../src/key_store/interface.ts | 2 +- .../src/key_store/local_key_store.ts | 8 +++---- .../validator-client/src/validator.test.ts | 23 ++++++++++++++----- .../validator-client/src/validator.ts | 19 ++++++++++++--- 6 files changed, 39 insertions(+), 16 deletions(-) diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 6b0291a0939..451de45e3d7 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -1,8 +1,8 @@ import { BlockAttestation, BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; -import { Buffer32 } from '@aztec/foundation/buffer'; import { type ValidatorKeyStore } from '../key_store/interface.js'; diff --git a/yarn-project/validator-client/src/errors/validator.error.ts b/yarn-project/validator-client/src/errors/validator.error.ts index 36ad31720be..73c3eaf2729 100644 --- a/yarn-project/validator-client/src/errors/validator.error.ts +++ b/yarn-project/validator-client/src/errors/validator.error.ts @@ -6,7 +6,6 @@ export class ValidatorError extends Error { } } -// TODO(md): add unit test export class InvalidValidatorPrivateKeyError extends ValidatorError { constructor() { super('Invalid validator private key provided'); diff --git a/yarn-project/validator-client/src/key_store/interface.ts b/yarn-project/validator-client/src/key_store/interface.ts index fbacfe71904..e83204e8a0d 100644 --- a/yarn-project/validator-client/src/key_store/interface.ts +++ b/yarn-project/validator-client/src/key_store/interface.ts @@ -1,5 +1,5 @@ -import { type Signature } from '@aztec/foundation/eth-signature'; import { type Buffer32 } from '@aztec/foundation/buffer'; +import { type Signature } from '@aztec/foundation/eth-signature'; /** Key Store * diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index a789298e091..33e6273f1a4 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -1,6 +1,6 @@ -import { type Signature } from '@aztec/foundation/eth-signature'; +import { type Buffer32 } from '@aztec/foundation/buffer'; import { Secp256k1Signer } from '@aztec/foundation/crypto'; -import { Buffer32 } from '@aztec/foundation/buffer'; +import { type Signature } from '@aztec/foundation/eth-signature'; import { 
type ValidatorKeyStore } from './interface.js'; @@ -12,8 +12,8 @@ import { type ValidatorKeyStore } from './interface.js'; export class LocalKeyStore implements ValidatorKeyStore { private signer: Secp256k1Signer; - constructor(privateKey: string) { - this.signer = new Secp256k1Signer(Buffer32.fromString(privateKey)); + constructor(privateKey: Buffer32) { + this.signer = new Secp256k1Signer(privateKey); } /** diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index fe5406287d0..fc870184fec 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -12,10 +12,16 @@ import { type MockProxy, mock } from 'jest-mock-extended'; import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; import { makeBlockProposal } from '../../circuit-types/src/p2p/mocks.js'; -import { AttestationTimeoutError, TransactionsNotAvailableError } from './errors/validator.error.js'; +import { type ValidatorClientConfig } from './config.js'; +import { + AttestationTimeoutError, + InvalidValidatorPrivateKeyError, + TransactionsNotAvailableError, +} from './errors/validator.error.js'; import { ValidatorClient } from './validator.js'; describe('ValidationService', () => { + let config: ValidatorClientConfig; let validatorClient: ValidatorClient; let p2pClient: MockProxy; let validatorAccount: PrivateKeyAccount; @@ -27,7 +33,7 @@ describe('ValidationService', () => { const validatorPrivateKey = generatePrivateKey(); validatorAccount = privateKeyToAccount(validatorPrivateKey); - const config = { + config = { validatorPrivateKey: validatorPrivateKey, attestationPoolingIntervalMs: 1000, attestationWaitTimeoutMs: 1000, @@ -36,6 +42,11 @@ describe('ValidationService', () => { validatorClient = ValidatorClient.new(config, p2pClient); }); + it('Should throw error if an invalid private key is provided', () => { + config.validatorPrivateKey = '0x1234567890123456789'; + expect(() => ValidatorClient.new(config, p2pClient)).toThrow(InvalidValidatorPrivateKeyError); + }); + it('Should create a valid block proposal', async () => { const header = makeHeader(); const archive = Fr.random(); @@ -49,13 +60,13 @@ describe('ValidationService', () => { expect(blockProposal.getSender()).toEqual(validatorAddress); }); - it('Should a timeout if we do not collect enough attestations in time', () => { + it('Should a timeout if we do not collect enough attestations in time', async () => { const proposal = makeBlockProposal(); - expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError); + await expect(validatorClient.collectAttestations(proposal, 2)).rejects.toThrow(AttestationTimeoutError); }); - it('Should throw an error if the transactions are not available', () => { + it('Should throw an error if the transactions are not available', async () => { const proposal = makeBlockProposal(); // mock the p2pClient.getTxStatus to return undefined for all transactions @@ -63,7 +74,7 @@ describe('ValidationService', () => { // Mock the p2pClient.requestTxs to return undefined for all transactions p2pClient.requestTxs.mockImplementation(() => Promise.resolve([undefined])); - expect(validatorClient.ensureTransactionsAreAvailable(proposal)).rejects.toThrow( + await expect(validatorClient.ensureTransactionsAreAvailable(proposal)).rejects.toThrow( TransactionsNotAvailableError, ); }); diff --git a/yarn-project/validator-client/src/validator.ts 
b/yarn-project/validator-client/src/validator.ts index cbd1260d70f..890e413ffff 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,5 +1,6 @@ import { type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; @@ -7,7 +8,11 @@ import { type P2P } from '@aztec/p2p'; import { type ValidatorClientConfig } from './config.js'; import { ValidationService } from './duties/validation_service.js'; -import { AttestationTimeoutError, InvalidValidatorPrivateKeyError, TransactionsNotAvailableError } from './errors/validator.error.js'; +import { + AttestationTimeoutError, + InvalidValidatorPrivateKeyError, + TransactionsNotAvailableError, +} from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; @@ -43,11 +48,11 @@ export class ValidatorClient implements Validator { static new(config: ValidatorClientConfig, p2pClient: P2P) { if (!config.validatorPrivateKey) { - // TODO: more validation here? throw new InvalidValidatorPrivateKeyError(); } - const localKeyStore = new LocalKeyStore(config.validatorPrivateKey); + const privateKey = validatePrivateKey(config.validatorPrivateKey); + const localKeyStore = new LocalKeyStore(privateKey); const validator = new ValidatorClient( localKeyStore, @@ -159,3 +164,11 @@ export class ValidatorClient implements Validator { } } } + +function validatePrivateKey(privateKey: string): Buffer32 { + try { + return Buffer32.fromString(privateKey); + } catch (error) { + throw new InvalidValidatorPrivateKeyError(); + } +} From f0f87a4b801e671ffdf1237ac3ff7a7ab3d22c2c Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 21:13:47 +0000 Subject: [PATCH 047/123] fix: reinstante eip712 --- .../src/core/sequencer_selection/Leonidas.sol | 5 ++++- l1-contracts/test/sparta/Sparta.t.sol | 9 +++++++-- .../circuit-types/src/p2p/block_attestation.ts | 4 ++-- .../circuit-types/src/p2p/block_proposal.ts | 8 ++++++-- yarn-project/circuit-types/src/p2p/block_utils.ts | 13 ++++++++++++- .../src/crypto/secp256k1-signer/secp256k1_signer.ts | 12 +++++++++++- .../foundation/src/crypto/secp256k1-signer/utils.ts | 8 ++++++++ .../src/attestation_pool/memory_attestation_pool.ts | 2 +- yarn-project/p2p/src/service/reqresp/interface.ts | 2 +- .../src/duties/validation_service.ts | 4 ++-- .../validator-client/src/key_store/interface.ts | 8 ++++++++ .../src/key_store/local_key_store.ts | 6 ++++++ 12 files changed, 68 insertions(+), 13 deletions(-) diff --git a/l1-contracts/src/core/sequencer_selection/Leonidas.sol b/l1-contracts/src/core/sequencer_selection/Leonidas.sol index f4f2da1c43f..c57a9ff03cf 100644 --- a/l1-contracts/src/core/sequencer_selection/Leonidas.sol +++ b/l1-contracts/src/core/sequencer_selection/Leonidas.sol @@ -9,6 +9,7 @@ import {Ownable} from "@oz/access/Ownable.sol"; import {SignatureLib} from "./SignatureLib.sol"; import {SampleLib} from "./SampleLib.sol"; import {Constants} from "../libraries/ConstantsGen.sol"; +import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {ILeonidas} from "./ILeonidas.sol"; @@ -29,6 +30,7 @@ import {ILeonidas} from 
"./ILeonidas.sol"; contract Leonidas is Ownable, ILeonidas { using EnumerableSet for EnumerableSet.AddressSet; using SignatureLib for SignatureLib.Signature; + using MessageHashUtils for bytes32; /** * @notice The data structure for an epoch @@ -376,6 +378,7 @@ contract Leonidas is Ownable, ILeonidas { // Validate the attestations uint256 validAttestations = 0; + bytes32 digest = _digest.toEthSignedMessageHash(); for (uint256 i = 0; i < _signatures.length; i++) { SignatureLib.Signature memory signature = _signatures[i]; if (signature.isEmpty) { @@ -383,7 +386,7 @@ contract Leonidas is Ownable, ILeonidas { } // The verification will throw if invalid - signature.verify(committee[i], _digest); + signature.verify(committee[i], digest); validAttestations++; } diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index fff0a2828d5..76ebb42144f 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -19,12 +19,15 @@ import {MerkleTestUtil} from "../merkle/TestUtil.sol"; import {PortalERC20} from "../portals/PortalERC20.sol"; import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; +import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; + /** * We are using the same blocks as from Rollup.t.sol. * The tests in this file is testing the sequencer selection */ - contract SpartaTest is DecoderBase { + using MessageHashUtils for bytes32; + Registry internal registry; Inbox internal inbox; Outbox internal outbox; @@ -277,7 +280,9 @@ contract SpartaTest is DecoderBase { returns (SignatureLib.Signature memory) { uint256 privateKey = privateKeys[_signer]; - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); + + bytes32 digest = _digest.toEthSignedMessageHash(); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); return SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}); } diff --git a/yarn-project/circuit-types/src/p2p/block_attestation.ts b/yarn-project/circuit-types/src/p2p/block_attestation.ts index 16581b593b2..ec554f45203 100644 --- a/yarn-project/circuit-types/src/p2p/block_attestation.ts +++ b/yarn-project/circuit-types/src/p2p/block_attestation.ts @@ -6,7 +6,7 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { TxHash } from '../tx/tx_hash.js'; -import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; +import { getHashedSignaturePayloadEthSignedMessage, getSignaturePayload } from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { TopicType, createTopicString } from './topic_type.js'; @@ -55,7 +55,7 @@ export class BlockAttestation extends Gossipable { getSender() { if (!this.sender) { // Recover the sender from the attestation - const hashed = getHashedSignaturePayload(this.archive, this.txHashes); + const hashed = getHashedSignaturePayloadEthSignedMessage(this.archive, this.txHashes); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index a480296b9c6..7da744c377a 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -6,7 +6,11 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, 
serializeToBuffer } from '@aztec/foundation/serialize'; import { TxHash } from '../tx/tx_hash.js'; -import { getHashedSignaturePayload, getSignaturePayload } from './block_utils.js'; +import { + getHashedSignaturePayload, + getHashedSignaturePayloadEthSignedMessage, + getSignaturePayload, +} from './block_utils.js'; import { Gossipable } from './gossipable.js'; import { TopicType, createTopicString } from './topic_type.js'; @@ -66,7 +70,7 @@ export class BlockProposal extends Gossipable { */ getSender() { if (!this.sender) { - const hashed = getHashedSignaturePayload(this.archive, this.txs); + const hashed = getHashedSignaturePayloadEthSignedMessage(this.archive, this.txs); // Cache the sender for later use this.sender = recoverAddress(hashed, this.signature); } diff --git a/yarn-project/circuit-types/src/p2p/block_utils.ts b/yarn-project/circuit-types/src/p2p/block_utils.ts index 79e8cc685a7..d4f4cfc34b7 100644 --- a/yarn-project/circuit-types/src/p2p/block_utils.ts +++ b/yarn-project/circuit-types/src/p2p/block_utils.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { keccak256 } from '@aztec/foundation/crypto'; +import { keccak256, makeEthSignDigest } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; import { encodeAbiParameters, parseAbiParameters } from 'viem'; @@ -29,3 +29,14 @@ export function getSignaturePayload(archive: Fr, txs: TxHash[]) { export function getHashedSignaturePayload(archive: Fr, txs: TxHash[]): Buffer32 { return Buffer32.fromBuffer(keccak256(getSignaturePayload(archive, txs))); } + +/** + * Get the hashed payload for the signature of the block proposal as an Ethereum signed message EIP-712 + * @param archive - The archive of the block + * @param txs - The transactions in the block + * @returns The hashed payload for the signature of the block proposal as an Ethereum signed message + */ +export function getHashedSignaturePayloadEthSignedMessage(archive: Fr, txs: TxHash[]): Buffer32 { + const payload = getHashedSignaturePayload(archive, txs); + return makeEthSignDigest(payload); +} diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts index f3bc42699cb..1af00bc4026 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/secp256k1_signer.ts @@ -2,7 +2,7 @@ import { type Buffer32 } from '@aztec/foundation/buffer'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { type Signature } from '@aztec/foundation/eth-signature'; -import { addressFromPrivateKey, signMessage } from './utils.js'; +import { addressFromPrivateKey, makeEthSignDigest, signMessage } from './utils.js'; /** * Secp256k1Signer @@ -21,4 +21,14 @@ export class Secp256k1Signer { sign(message: Buffer32): Signature { return signMessage(message, this.privateKey.buffer); } + + /** + * Sign a message using the same method as eth_sign + * @param message - The message to sign. + * @returns The signature. 
+ */ + signMessage(message: Buffer32): Signature { + const digest = makeEthSignDigest(message); + return this.sign(digest); + } } diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts index 13a1662be48..c3be5e04b3a 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -5,6 +5,14 @@ import { EthAddress } from '../../eth-address/index.js'; import { Signature } from '../../eth-signature/eth_signature.js'; import { keccak256 } from '../keccak/index.js'; +const ETH_SIGN_PREFIX = '\x19Ethereum Signed Message:\n32'; + +// We just hash the message to make it easier to work with in the smart contract. +export function makeEthSignDigest(message: Buffer32): Buffer32 { + const prefix = Buffer.from(ETH_SIGN_PREFIX); + return Buffer32.fromBuffer(keccak256(Buffer.concat([prefix, message.buffer]))); +} + /** * Converts a public key to an address. * @param publicKey - The public key to convert. diff --git a/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts b/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts index 5d0d2002425..967429825da 100644 --- a/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts +++ b/yarn-project/p2p/src/attestation_pool/memory_attestation_pool.ts @@ -41,7 +41,7 @@ export class InMemoryAttestationPool implements AttestationPool { return Promise.resolve(); } - public async deleteAttestations(attestations: BlockAttestation[]): Promise { + public deleteAttestations(attestations: BlockAttestation[]): Promise { for (const attestation of attestations) { const slotNumber = attestation.header.globalVariables.slotNumber; const slotAttestationMap = this.attestations.get(slotNumber.toBigInt()); diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 023f374e671..606efc17bc9 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -126,5 +126,5 @@ export const subProtocolMap: SubProtocolMap = { [TX_REQ_PROTOCOL]: { request: TxHash, response: Tx, - } + }, }; diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 451de45e3d7..6df10436870 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -19,7 +19,7 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) 
*/ createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise { - const payloadSigner = (payload: Buffer32) => this.keyStore.sign(payload); + const payloadSigner = (payload: Buffer32) => this.keyStore.signMessage(payload); return BlockProposal.createProposalFromSigner(header, archive, txs, payloadSigner); } @@ -37,7 +37,7 @@ export class ValidationService { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7961): check that the current validator is correct const buf = Buffer32.fromBuffer(keccak256(proposal.getPayload())); - const sig = await this.keyStore.sign(buf); + const sig = await this.keyStore.signMessage(buf); return new BlockAttestation(proposal.header, proposal.archive, proposal.txs, sig); } } diff --git a/yarn-project/validator-client/src/key_store/interface.ts b/yarn-project/validator-client/src/key_store/interface.ts index e83204e8a0d..2b96c90377b 100644 --- a/yarn-project/validator-client/src/key_store/interface.ts +++ b/yarn-project/validator-client/src/key_store/interface.ts @@ -7,4 +7,12 @@ import { type Signature } from '@aztec/foundation/eth-signature'; */ export interface ValidatorKeyStore { sign(message: Buffer32): Promise; + /** + * Flavor of sign message that followed EIP-712 eth signed message prefix + * Note: this is only required when we are using ecdsa signatures over secp256k1 + * + * @param message - The message to sign. + * @returns The signature. + */ + signMessage(message: Buffer32): Promise; } diff --git a/yarn-project/validator-client/src/key_store/local_key_store.ts b/yarn-project/validator-client/src/key_store/local_key_store.ts index 33e6273f1a4..736ee361a45 100644 --- a/yarn-project/validator-client/src/key_store/local_key_store.ts +++ b/yarn-project/validator-client/src/key_store/local_key_store.ts @@ -27,4 +27,10 @@ export class LocalKeyStore implements ValidatorKeyStore { return Promise.resolve(signature); } + + public signMessage(message: Buffer32): Promise { + // Sign message adds eth sign prefix and hashes before signing + const signature = this.signer.signMessage(message); + return Promise.resolve(signature); + } } From 7a33bef7995e67079e025a4ffbcc28fbf2520490 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 22:18:09 +0000 Subject: [PATCH 048/123] refactor: remove multiple public dbs --- .../simulator/src/avm/avm_simulator.test.ts | 33 +++-- .../simulator/src/avm/fixtures/index.ts | 32 +---- .../simulator/src/avm/journal/host_storage.ts | 14 --- .../simulator/src/avm/journal/index.ts | 1 - .../simulator/src/avm/journal/journal.test.ts | 20 +-- .../simulator/src/avm/journal/journal.ts | 31 ++--- .../src/avm/opcodes/accrued_substate.test.ts | 23 ++-- .../src/avm/opcodes/contract.test.ts | 12 +- .../src/avm/opcodes/external_calls.test.ts | 18 +-- yarn-project/simulator/src/avm/test_utils.ts | 36 +++--- .../src/public/app_logic_phase_manager.ts | 15 ++- yarn-project/simulator/src/public/executor.ts | 16 +-- .../src/public/phase_manager_factory.ts | 42 ++----- .../src/public/public_db_sources.test.ts | 117 +++++++++--------- .../simulator/src/public/public_db_sources.ts | 115 ++++++++--------- .../src/public/public_processor.test.ts | 116 +++++++++-------- .../simulator/src/public/public_processor.ts | 37 +++--- .../src/public/setup_phase_manager.test.ts | 11 +- .../src/public/setup_phase_manager.ts | 13 +- .../src/public/tail_phase_manager.ts | 9 +- .../src/public/teardown_phase_manager.ts | 11 +- 21 files changed, 312 insertions(+), 410 deletions(-) delete mode 
100644 yarn-project/simulator/src/avm/journal/host_storage.ts diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 6bca1202516..4742d6b531e 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -10,6 +10,7 @@ import { type Fieldable } from '@aztec/foundation/serialize'; import { randomInt } from 'crypto'; import { mock } from 'jest-mock-extended'; +import { type WorldStateDB } from '../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js'; import { type AvmContext } from './avm_context.js'; import { type AvmExecutionEnvironment } from './avm_execution_environment.js'; @@ -22,7 +23,6 @@ import { initContext, initExecutionEnvironment, initGlobalVariables, - initHostStorage, initMachineState, initPersistableStateManager, randomMemoryBytes, @@ -30,7 +30,6 @@ import { randomMemoryUint64s, resolveAvmTestContractAssertionMessage, } from './fixtures/index.js'; -import { type HostStorage } from './journal/host_storage.js'; import { type AvmPersistableStateManager } from './journal/journal.js'; import { Add, CalldataCopy, Return, Set } from './opcodes/index.js'; import { encodeToBytecode } from './serialization/bytecode_serialization.js'; @@ -375,14 +374,14 @@ describe('AVM simulator: transpiled Noir contracts', () => { const value0 = new Fr(420); const value1 = new Fr(69); - let hostStorage: HostStorage; + let worldStateDB: WorldStateDB; let trace: PublicSideEffectTraceInterface; let persistableState: AvmPersistableStateManager; beforeEach(() => { - hostStorage = initHostStorage(); + worldStateDB = mock(); trace = mock(); - persistableState = initPersistableStateManager({ hostStorage, trace }); + persistableState = initPersistableStateManager({ worldStateDB, trace }); }); const createContext = (calldata: Fr[] = []) => { @@ -409,7 +408,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(calldata); const bytecode = getAvmTestContractBytecode('note_hash_exists'); if (mockAtLeafIndex !== undefined) { - mockNoteHashExists(hostStorage, mockAtLeafIndex, value0); + mockNoteHashExists(worldStateDB, mockAtLeafIndex, value0); } const results = await new AvmSimulator(context).executeBytecode(bytecode); @@ -434,7 +433,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const bytecode = getAvmTestContractBytecode('nullifier_exists'); if (exists) { - mockNullifierExists(hostStorage, leafIndex, value0); + mockNullifierExists(worldStateDB, leafIndex, value0); } const results = await new AvmSimulator(context).executeBytecode(bytecode); @@ -473,7 +472,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(calldata); const bytecode = getAvmTestContractBytecode('l1_to_l2_msg_exists'); if (mockAtLeafIndex !== undefined) { - mockL1ToL2MessageExists(hostStorage, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); + mockL1ToL2MessageExists(worldStateDB, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); } const results = await new AvmSimulator(context).executeBytecode(bytecode); @@ -611,7 +610,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { it('Should read value in storage (single)', async () => { const context = createContext(); - mockStorageRead(hostStorage, value0); + mockStorageRead(worldStateDB, value0); const bytecode = getAvmTestContractBytecode('read_storage_single'); @@ 
-674,7 +673,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { [listSlot0.toBigInt(), value0], [listSlot1.toBigInt(), value1], ]); - mockStorageReadWithMap(hostStorage, mockedStorage); + mockStorageReadWithMap(worldStateDB, mockedStorage); const bytecode = getAvmTestContractBytecode('read_storage_list'); @@ -748,7 +747,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const calldata = [storageAddress]; const context = createContext(calldata); - mockStorageRead(hostStorage, value0); + mockStorageRead(worldStateDB, value0); const bytecode = getAvmTestContractBytecode('read_storage_map'); const results = await new AvmSimulator(context).executeBytecode(bytecode); @@ -780,7 +779,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { initializationHash: new Fr(0x101112), publicKeysHash: new Fr(0x161718), }; - mockGetContractInstance(hostStorage, contractInstance); + mockGetContractInstance(worldStateDB, contractInstance); const bytecode = getAvmTestContractBytecode('test_get_contract_instance_raw'); @@ -824,7 +823,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(calldata); const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); const addBytecode = getAvmTestContractBytecode('add_args_return'); - mockGetBytecode(hostStorage, addBytecode); + mockGetBytecode(worldStateDB, addBytecode); const nestedTrace = mock(); mockTraceFork(trace, nestedTrace); @@ -840,7 +839,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(calldata); const callBytecode = getAvmTestContractBytecode('nested_static_call_to_add'); const addBytecode = getAvmTestContractBytecode('add_args_return'); - mockGetBytecode(hostStorage, addBytecode); + mockGetBytecode(worldStateDB, addBytecode); const nestedTrace = mock(); mockTraceFork(trace, nestedTrace); @@ -857,7 +856,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(calldata); const callBytecode = getAvmTestContractBytecode('nested_call_to_add_with_gas'); const addBytecode = getAvmTestContractBytecode('add_args_return'); - mockGetBytecode(hostStorage, addBytecode); + mockGetBytecode(worldStateDB, addBytecode); mockTraceFork(trace); const results = await new AvmSimulator(context).executeBytecode(callBytecode); @@ -876,7 +875,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const context = createContext(); const callBytecode = getAvmTestContractBytecode('nested_static_call_to_set_storage'); const nestedBytecode = getAvmTestContractBytecode('set_storage_single'); - mockGetBytecode(hostStorage, nestedBytecode); + mockGetBytecode(worldStateDB, nestedBytecode); mockTraceFork(trace); const results = await new AvmSimulator(context).executeBytecode(callBytecode); @@ -900,7 +899,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const callBytecode = getAvmTestContractBytecode('nested_call_to_add'); // We actually don't pass the function ADD, but it's ok because the signature is the same. const nestedBytecode = getAvmTestContractBytecode('assert_same'); - mockGetBytecode(hostStorage, nestedBytecode); + mockGetBytecode(worldStateDB, nestedBytecode); const results = await new AvmSimulator(context).executeBytecode(callBytecode); expect(results.reverted).toBe(true); // The outer call should revert. 
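The hunks above all make the same substitution: the three-db HostStorage wrapper is gone and the AVM tests mock a single WorldStateDB. A minimal sketch of the wiring those tests converge on, assuming it sits alongside the test files touched above (the import paths and concrete values are illustrative only, not taken from any one test):

    import { Fr } from '@aztec/foundation/fields';
    import { mock } from 'jest-mock-extended';

    import { type WorldStateDB } from '../public/public_db_sources.js';
    import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js';
    import { initContext, initPersistableStateManager } from './fixtures/index.js';
    import { mockStorageRead } from './test_utils.js';

    // One mocked dependency replaces the old publicDb/contractsDb/commitmentsDb trio.
    const worldStateDB = mock<WorldStateDB>();
    const trace = mock<PublicSideEffectTraceInterface>();

    // Stub a storage read on the world-state mock and wire it straight into the state manager.
    mockStorageRead(worldStateDB, new Fr(420));
    const persistableState = initPersistableStateManager({ worldStateDB, trace });
    const context = initContext({ persistableState });
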
diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index 72bc6d66a7f..28092122612 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -10,20 +10,13 @@ import { strict as assert } from 'assert'; import { mock } from 'jest-mock-extended'; import merge from 'lodash.merge'; -import { - type CommitmentsDB, - type PublicContractsDB, - type PublicStateDB, - resolveAssertionMessage, - traverseCauseChain, -} from '../../index.js'; +import { type WorldStateDB, resolveAssertionMessage, traverseCauseChain } from '../../index.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { AvmContext } from '../avm_context.js'; import { AvmContextInputs, AvmExecutionEnvironment } from '../avm_execution_environment.js'; import { AvmMachineState } from '../avm_machine_state.js'; import { Field, Uint8, Uint64 } from '../avm_memory_types.js'; import { type AvmRevertReason } from '../errors.js'; -import { HostStorage } from '../journal/host_storage.js'; import { AvmPersistableStateManager } from '../journal/journal.js'; import { NullifierManager } from '../journal/nullifiers.js'; import { PublicStorage } from '../journal/public_storage.js'; @@ -43,32 +36,19 @@ export function initContext(overrides?: { ); } -/** Creates an empty host storage with mocked dbs. */ -export function initHostStorage(overrides?: { - publicDb?: PublicStateDB; - contractsDb?: PublicContractsDB; - commitmentsDb?: CommitmentsDB; -}): HostStorage { - return new HostStorage( - overrides?.publicDb || mock(), - overrides?.contractsDb || mock(), - overrides?.commitmentsDb || mock(), - ); -} - /** Creates an empty state manager with mocked host storage. 
*/ export function initPersistableStateManager(overrides?: { - hostStorage?: HostStorage; + worldStateDB?: WorldStateDB; trace?: PublicSideEffectTraceInterface; publicStorage?: PublicStorage; nullifiers?: NullifierManager; }): AvmPersistableStateManager { - const hostStorage = overrides?.hostStorage || initHostStorage(); + const worldStateDB = overrides?.worldStateDB || mock(); return new AvmPersistableStateManager( - hostStorage, + worldStateDB, overrides?.trace || mock(), - overrides?.publicStorage || new PublicStorage(hostStorage.publicStateDb), - overrides?.nullifiers || new NullifierManager(hostStorage.commitmentsDb), + overrides?.publicStorage || new PublicStorage(worldStateDB), + overrides?.nullifiers || new NullifierManager(worldStateDB), ); } diff --git a/yarn-project/simulator/src/avm/journal/host_storage.ts b/yarn-project/simulator/src/avm/journal/host_storage.ts deleted file mode 100644 index 1064c82b5e3..00000000000 --- a/yarn-project/simulator/src/avm/journal/host_storage.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../../public/db_interfaces.js'; - -/** - * Host storage - * - * A wrapper around the node dbs - */ -export class HostStorage { - constructor( - public readonly publicStateDb: PublicStateDB, - public readonly contractsDb: PublicContractsDB, - public readonly commitmentsDb: CommitmentsDB, - ) {} -} diff --git a/yarn-project/simulator/src/avm/journal/index.ts b/yarn-project/simulator/src/avm/journal/index.ts index 86fe25115d8..5a3cc78b416 100644 --- a/yarn-project/simulator/src/avm/journal/index.ts +++ b/yarn-project/simulator/src/avm/journal/index.ts @@ -1,2 +1 @@ -export * from './host_storage.js'; export * from './journal.js'; diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index f3c18188d12..07efb46a647 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -4,8 +4,9 @@ import { SerializableContractInstance } from '@aztec/types/contracts'; import { mock } from 'jest-mock-extended'; +import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; -import { initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; +import { initPersistableStateManager } from '../fixtures/index.js'; import { mockGetContractInstance, mockL1ToL2MessageExists, @@ -13,7 +14,6 @@ import { mockNullifierExists, mockStorageRead, } from '../test_utils.js'; -import { type HostStorage } from './host_storage.js'; import { type AvmPersistableStateManager } from './journal.js'; describe('journal', () => { @@ -21,14 +21,14 @@ describe('journal', () => { const utxo = Fr.random(); const leafIndex = Fr.random(); - let hostStorage: HostStorage; + let worldStateDB: WorldStateDB; let trace: PublicSideEffectTraceInterface; let persistableState: AvmPersistableStateManager; beforeEach(() => { - hostStorage = initHostStorage(); + worldStateDB = mock(); trace = mock(); - persistableState = initPersistableStateManager({ hostStorage, trace }); + persistableState = initPersistableStateManager({ worldStateDB, trace }); }); describe('Public Storage', () => { @@ -38,7 +38,7 @@ describe('journal', () => { const storedValue = new Fr(420); const cachedValue = new Fr(69); - mockStorageRead(hostStorage, storedValue); + mockStorageRead(worldStateDB, storedValue); // Get the 
cache first const cacheMissResult = await persistableState.readStorage(address, slot); @@ -83,7 +83,7 @@ describe('journal', () => { }); it('checkNoteHashExists works for existing note hashes', async () => { - mockNoteHashExists(hostStorage, leafIndex, utxo); + mockNoteHashExists(worldStateDB, leafIndex, utxo); const exists = await persistableState.checkNoteHashExists(address, utxo, leafIndex); expect(exists).toEqual(true); expect(trace.traceNoteHashCheck).toHaveBeenCalledTimes(1); @@ -110,7 +110,7 @@ describe('journal', () => { }); it('checkNullifierExists works for existing nullifiers', async () => { - mockNullifierExists(hostStorage, leafIndex, utxo); + mockNullifierExists(worldStateDB, leafIndex, utxo); const exists = await persistableState.checkNullifierExists(address, utxo); expect(exists).toEqual(true); expect(trace.traceNullifierCheck).toHaveBeenCalledTimes(1); @@ -130,7 +130,7 @@ describe('journal', () => { }); it('checkL1ToL2MessageExists works for existing message', async () => { - mockL1ToL2MessageExists(hostStorage, leafIndex, utxo); + mockL1ToL2MessageExists(worldStateDB, leafIndex, utxo); const exists = await persistableState.checkL1ToL2MessageExists(address, utxo, leafIndex); expect(exists).toEqual(true); expect(trace.traceL1ToL2MessageCheck).toHaveBeenCalledTimes(1); @@ -148,7 +148,7 @@ describe('journal', () => { describe('Getting contract instances', () => { it('Should get contract instance', async () => { const contractInstance = randomContractInstanceWithAddress(/*(base instance) opts=*/ {}, /*address=*/ address); - mockGetContractInstance(hostStorage, contractInstance); + mockGetContractInstance(worldStateDB, contractInstance); await persistableState.getContractInstance(address); expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); expect(trace.traceGetContractInstance).toHaveBeenCalledWith({ exists: true, ...contractInstance }); diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 2f869c76a7b..843b6ed90ae 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -3,11 +3,11 @@ import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { SerializableContractInstance } from '@aztec/types/contracts'; +import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type TracedContractInstance } from '../../public/side_effect_trace.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContractCallResult } from '../avm_contract_call_result.js'; import { type AvmExecutionEnvironment } from '../avm_execution_environment.js'; -import { type HostStorage } from './host_storage.js'; import { NullifierManager } from './nullifiers.js'; import { PublicStorage } from './public_storage.js'; @@ -25,7 +25,7 @@ export class AvmPersistableStateManager { constructor( /** Reference to node storage */ - private readonly hostStorage: HostStorage, + private readonly worldStateDB: WorldStateDB, /** Side effect trace */ private readonly trace: PublicSideEffectTraceInterface, /** Public storage, including cached writes */ @@ -39,18 +39,15 @@ export class AvmPersistableStateManager { * Create a new state manager with some preloaded pending siloed nullifiers */ public static newWithPendingSiloedNullifiers( - hostStorage: HostStorage, + worldStateDB: WorldStateDB, trace: PublicSideEffectTraceInterface, 
pendingSiloedNullifiers: Fr[], ) { - const parentNullifiers = NullifierManager.newWithPendingSiloedNullifiers( - hostStorage.commitmentsDb, - pendingSiloedNullifiers, - ); + const parentNullifiers = NullifierManager.newWithPendingSiloedNullifiers(worldStateDB, pendingSiloedNullifiers); return new AvmPersistableStateManager( - hostStorage, + worldStateDB, trace, - /*publicStorage=*/ new PublicStorage(hostStorage.publicStateDb), + /*publicStorage=*/ new PublicStorage(worldStateDB), /*nullifiers=*/ parentNullifiers.fork(), ); } @@ -60,7 +57,7 @@ export class AvmPersistableStateManager { */ public fork() { return new AvmPersistableStateManager( - this.hostStorage, + this.worldStateDB, this.trace.fork(), this.publicStorage.fork(), this.nullifiers.fork(), @@ -122,7 +119,7 @@ export class AvmPersistableStateManager { * @returns true if the note hash exists at the given leaf index, false otherwise */ public async checkNoteHashExists(storageAddress: Fr, noteHash: Fr, leafIndex: Fr): Promise { - const gotLeafValue = (await this.hostStorage.commitmentsDb.getCommitmentValue(leafIndex.toBigInt())) ?? Fr.ZERO; + const gotLeafValue = (await this.worldStateDB.getCommitmentValue(leafIndex.toBigInt())) ?? Fr.ZERO; const exists = gotLeafValue.equals(noteHash); this.log.debug( `noteHashes(${storageAddress})@${noteHash} ?? leafIndex: ${leafIndex} | gotLeafValue: ${gotLeafValue}, exists: ${exists}.`, @@ -177,7 +174,7 @@ export class AvmPersistableStateManager { * @returns exists - whether the message exists in the L1 to L2 Messages tree */ public async checkL1ToL2MessageExists(contractAddress: Fr, msgHash: Fr, msgLeafIndex: Fr): Promise { - const valueAtIndex = (await this.hostStorage.commitmentsDb.getL1ToL2LeafValue(msgLeafIndex.toBigInt())) ?? Fr.ZERO; + const valueAtIndex = (await this.worldStateDB.getL1ToL2LeafValue(msgLeafIndex.toBigInt())) ?? Fr.ZERO; const exists = valueAtIndex.equals(msgHash); this.log.debug( `l1ToL2Messages(@${msgLeafIndex}) ?? exists: ${exists}, expected: ${msgHash}, found: ${valueAtIndex}.`, @@ -217,7 +214,7 @@ export class AvmPersistableStateManager { public async getContractInstance(contractAddress: Fr): Promise { let exists = true; const aztecAddress = AztecAddress.fromField(contractAddress); - let instance = await this.hostStorage.contractsDb.getContractInstance(aztecAddress); + let instance = await this.worldStateDB.getContractInstance(aztecAddress); if (instance === undefined) { instance = SerializableContractInstance.empty().withAddress(aztecAddress); exists = false; @@ -242,7 +239,7 @@ export class AvmPersistableStateManager { * Get a contract's bytecode from the contracts DB */ public async getBytecode(contractAddress: AztecAddress, selector: FunctionSelector): Promise { - return await this.hostStorage.contractsDb.getBytecode(contractAddress, selector); + return await this.worldStateDB.getBytecode(contractAddress, selector); } /** @@ -260,10 +257,8 @@ export class AvmPersistableStateManager { this.acceptNestedCallState(nestedState); } const functionName = - (await nestedState.hostStorage.contractsDb.getDebugFunctionName( - nestedEnvironment.address, - nestedEnvironment.functionSelector, - )) ?? `${nestedEnvironment.address}:${nestedEnvironment.functionSelector}`; + (await this.worldStateDB.getDebugFunctionName(nestedEnvironment.address, nestedEnvironment.functionSelector)) ?? 
+ `${nestedEnvironment.address}:${nestedEnvironment.functionSelector}`; this.log.verbose(`[AVM] Calling nested function ${functionName}`); diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts index aab2ae6efa6..442bb985ba3 100644 --- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts @@ -2,17 +2,12 @@ import { Fr } from '@aztec/circuits.js'; import { mock } from 'jest-mock-extended'; +import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContext } from '../avm_context.js'; import { Field, Uint8, Uint32 } from '../avm_memory_types.js'; import { InstructionExecutionError, StaticCallAlterationError } from '../errors.js'; -import { - initContext, - initExecutionEnvironment, - initHostStorage, - initPersistableStateManager, -} from '../fixtures/index.js'; -import { type HostStorage } from '../journal/host_storage.js'; +import { initContext, initExecutionEnvironment, initPersistableStateManager } from '../fixtures/index.js'; import { type AvmPersistableStateManager } from '../journal/journal.js'; import { mockL1ToL2MessageExists, mockNoteHashExists, mockNullifierExists } from '../test_utils.js'; import { @@ -26,7 +21,7 @@ import { } from './accrued_substate.js'; describe('Accrued Substate', () => { - let hostStorage: HostStorage; + let worldStateDB: WorldStateDB; let trace: PublicSideEffectTraceInterface; let persistableState: AvmPersistableStateManager; let context: AvmContext; @@ -43,9 +38,9 @@ describe('Accrued Substate', () => { const existsOffset = 2; beforeEach(() => { - hostStorage = initHostStorage(); + worldStateDB = mock(); trace = mock(); - persistableState = initPersistableStateManager({ hostStorage, trace }); + persistableState = initPersistableStateManager({ worldStateDB, trace }); context = initContext({ persistableState, env: initExecutionEnvironment({ address, storageAddress, sender }) }); }); @@ -83,7 +78,7 @@ describe('Accrued Substate', () => { : ''; it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { if (mockAtLeafIndex !== undefined) { - mockNoteHashExists(hostStorage, mockAtLeafIndex, value0); + mockNoteHashExists(worldStateDB, mockAtLeafIndex, value0); } context.machineState.memory.set(value0Offset, new Field(value0)); // noteHash @@ -160,7 +155,7 @@ describe('Accrued Substate', () => { const storageAddressOffset = 1; if (exists) { - mockNullifierExists(hostStorage, leafIndex, value0); + mockNullifierExists(worldStateDB, leafIndex, value0); } context.machineState.memory.set(value0Offset, new Field(value0)); // nullifier @@ -229,7 +224,7 @@ describe('Accrued Substate', () => { }); it('Nullifier collision reverts (nullifier exists in host state)', async () => { - mockNullifierExists(hostStorage, leafIndex); // db will say that nullifier already exists + mockNullifierExists(worldStateDB, leafIndex); // db will say that nullifier already exists context.machineState.memory.set(value0Offset, new Field(value0)); await expect(new EmitNullifier(/*indirect=*/ 0, /*offset=*/ value0Offset).execute(context)).rejects.toThrow( new InstructionExecutionError( @@ -275,7 +270,7 @@ describe('Accrued Substate', () => { it(`Should return ${expectFound} (and be traced) when noteHash ${existsStr} ${foundAtStr}`, async () => { if 
(mockAtLeafIndex !== undefined) { - mockL1ToL2MessageExists(hostStorage, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); + mockL1ToL2MessageExists(worldStateDB, mockAtLeafIndex, value0, /*valueAtOtherIndices=*/ value1); } context.machineState.memory.set(value0Offset, new Field(value0)); // noteHash diff --git a/yarn-project/simulator/src/avm/opcodes/contract.test.ts b/yarn-project/simulator/src/avm/opcodes/contract.test.ts index ced3a000d64..6ce6eefac99 100644 --- a/yarn-project/simulator/src/avm/opcodes/contract.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/contract.test.ts @@ -4,11 +4,11 @@ import { SerializableContractInstance } from '@aztec/types/contracts'; import { mock } from 'jest-mock-extended'; +import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContext } from '../avm_context.js'; import { Field } from '../avm_memory_types.js'; -import { initContext, initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; -import { type HostStorage } from '../journal/host_storage.js'; +import { initContext, initPersistableStateManager } from '../fixtures/index.js'; import { type AvmPersistableStateManager } from '../journal/journal.js'; import { mockGetContractInstance } from '../test_utils.js'; import { GetContractInstance } from './contract.js'; @@ -16,15 +16,15 @@ import { GetContractInstance } from './contract.js'; describe('Contract opcodes', () => { const address = AztecAddress.random(); - let hostStorage: HostStorage; + let worldStateDB: WorldStateDB; let trace: PublicSideEffectTraceInterface; let persistableState: AvmPersistableStateManager; let context: AvmContext; beforeEach(() => { - hostStorage = initHostStorage(); + worldStateDB = mock(); trace = mock(); - persistableState = initPersistableStateManager({ hostStorage, trace }); + persistableState = initPersistableStateManager({ worldStateDB, trace }); context = initContext({ persistableState }); }); @@ -48,7 +48,7 @@ describe('Contract opcodes', () => { it('should copy contract instance to memory if found', async () => { const contractInstance = randomContractInstanceWithAddress(/*(base instance) opts=*/ {}, /*address=*/ address); - mockGetContractInstance(hostStorage, contractInstance); + mockGetContractInstance(worldStateDB, contractInstance); context.machineState.memory.set(0, new Field(address.toField())); await new GetContractInstance(/*indirect=*/ 0, /*addressOffset=*/ 0, /*dstOffset=*/ 1).execute(context); diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index ce6bbe4a7f7..638e7913d45 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -2,12 +2,12 @@ import { Fr } from '@aztec/foundation/fields'; import { mock } from 'jest-mock-extended'; +import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { type AvmContext } from '../avm_context.js'; import { Field, TypeTag, Uint8, Uint32 } from '../avm_memory_types.js'; import { markBytecodeAsAvm } from '../bytecode_utils.js'; -import { adjustCalldataIndex, initContext, initHostStorage, initPersistableStateManager } from '../fixtures/index.js'; -import { type HostStorage } from '../journal/host_storage.js'; +import { 
adjustCalldataIndex, initContext, initPersistableStateManager } from '../fixtures/index.js'; import { type AvmPersistableStateManager } from '../journal/journal.js'; import { encodeToBytecode } from '../serialization/bytecode_serialization.js'; import { Opcode } from '../serialization/instruction_serialization.js'; @@ -20,15 +20,15 @@ import { SStore } from './storage.js'; describe('External Calls', () => { let context: AvmContext; - let hostStorage: HostStorage; + let worldStateDB: WorldStateDB; let trace: PublicSideEffectTraceInterface; let persistableState: AvmPersistableStateManager; beforeEach(() => { - hostStorage = initHostStorage(); + worldStateDB = mock(); trace = mock(); - persistableState = initPersistableStateManager({ hostStorage, trace }); - context = initContext({ persistableState: persistableState }); + persistableState = initPersistableStateManager({ worldStateDB, trace }); + context = initContext({ persistableState }); mockTraceFork(trace); // make sure trace.fork() works on nested call }); @@ -94,7 +94,7 @@ describe('External Calls', () => { new Return(/*indirect=*/ 0, /*retOffset=*/ 0, /*size=*/ 2), ]), ); - mockGetBytecode(hostStorage, otherContextInstructionsBytecode); + mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -148,7 +148,7 @@ describe('External Calls', () => { new Return(/*indirect=*/ 0, /*retOffset=*/ 0, /*size=*/ 1), ]), ); - mockGetBytecode(hostStorage, otherContextInstructionsBytecode); + mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -235,7 +235,7 @@ describe('External Calls', () => { ]; const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions)); - mockGetBytecode(hostStorage, otherContextInstructionsBytecode); + mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); const instruction = new StaticCall( /*indirect=*/ 0, diff --git a/yarn-project/simulator/src/avm/test_utils.ts b/yarn-project/simulator/src/avm/test_utils.ts index f7dda9f6593..65862759fb5 100644 --- a/yarn-project/simulator/src/avm/test_utils.ts +++ b/yarn-project/simulator/src/avm/test_utils.ts @@ -4,12 +4,11 @@ import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; import { type jest } from '@jest/globals'; import { mock } from 'jest-mock-extended'; -import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from '../public/db_interfaces.js'; +import { type WorldStateDB } from '../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js'; -import { type HostStorage } from './journal/host_storage.js'; -export function mockGetBytecode(hs: HostStorage, bytecode: Buffer) { - (hs as jest.Mocked).contractsDb.getBytecode.mockResolvedValue(bytecode); +export function mockGetBytecode(worldStateDB: WorldStateDB, bytecode: Buffer) { + (worldStateDB as jest.Mocked).getBytecode.mockResolvedValue(bytecode); } export function mockTraceFork(trace: PublicSideEffectTraceInterface, nestedTrace?: PublicSideEffectTraceInterface) { @@ -18,18 +17,18 @@ export function mockTraceFork(trace: PublicSideEffectTraceInterface, nestedTrace ); } -export function mockStorageRead(hs: HostStorage, value: Fr) { - (hs.publicStateDb as jest.Mocked).storageRead.mockResolvedValue(value); +export function mockStorageRead(worldStateDB: WorldStateDB, value: Fr) { + 
(worldStateDB as jest.Mocked).storageRead.mockResolvedValue(value); } -export function mockStorageReadWithMap(hs: HostStorage, mockedStorage: Map) { - (hs.publicStateDb as jest.Mocked).storageRead.mockImplementation((_address, slot) => +export function mockStorageReadWithMap(worldStateDB: WorldStateDB, mockedStorage: Map) { + (worldStateDB as jest.Mocked).storageRead.mockImplementation((_address, slot) => Promise.resolve(mockedStorage.get(slot.toBigInt()) ?? Fr.ZERO), ); } -export function mockNoteHashExists(hs: HostStorage, _leafIndex: Fr, value?: Fr) { - (hs.commitmentsDb as jest.Mocked).getCommitmentValue.mockImplementation((index: bigint) => { +export function mockNoteHashExists(worldStateDB: WorldStateDB, _leafIndex: Fr, value?: Fr) { + (worldStateDB as jest.Mocked).getCommitmentValue.mockImplementation((index: bigint) => { if (index == _leafIndex.toBigInt()) { return Promise.resolve(value); } else { @@ -39,12 +38,17 @@ export function mockNoteHashExists(hs: HostStorage, _leafIndex: Fr, value?: Fr) }); } -export function mockNullifierExists(hs: HostStorage, leafIndex: Fr, _value?: Fr) { - (hs.commitmentsDb as jest.Mocked).getNullifierIndex.mockResolvedValue(leafIndex.toBigInt()); +export function mockNullifierExists(worldStateDB: WorldStateDB, leafIndex: Fr, _value?: Fr) { + (worldStateDB as jest.Mocked).getNullifierIndex.mockResolvedValue(leafIndex.toBigInt()); } -export function mockL1ToL2MessageExists(hs: HostStorage, leafIndex: Fr, value: Fr, valueAtOtherIndices?: Fr) { - (hs.commitmentsDb as jest.Mocked).getL1ToL2LeafValue.mockImplementation((index: bigint) => { +export function mockL1ToL2MessageExists( + worldStateDB: WorldStateDB, + leafIndex: Fr, + value: Fr, + valueAtOtherIndices?: Fr, +) { + (worldStateDB as jest.Mocked).getL1ToL2LeafValue.mockImplementation((index: bigint) => { if (index == leafIndex.toBigInt()) { return Promise.resolve(value); } else { @@ -55,6 +59,6 @@ export function mockL1ToL2MessageExists(hs: HostStorage, leafIndex: Fr, value: F }); } -export function mockGetContractInstance(hs: HostStorage, contractInstance: ContractInstanceWithAddress) { - (hs.contractsDb as jest.Mocked).getContractInstance.mockResolvedValue(contractInstance); +export function mockGetContractInstance(worldStateDB: WorldStateDB, contractInstance: ContractInstanceWithAddress) { + (worldStateDB as jest.Mocked).getContractInstance.mockResolvedValue(contractInstance); } diff --git a/yarn-project/simulator/src/public/app_logic_phase_manager.ts b/yarn-project/simulator/src/public/app_logic_phase_manager.ts index 9809e4cb226..5d78b366766 100644 --- a/yarn-project/simulator/src/public/app_logic_phase_manager.ts +++ b/yarn-project/simulator/src/public/app_logic_phase_manager.ts @@ -1,11 +1,11 @@ import { PublicKernelType, type PublicProvingRequest, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; +import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type ContractsDataSourcePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; /** @@ -18,8 
+18,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - protected publicContractsDB: ContractsDataSourcePublicDB, - protected publicStateDB: PublicStateDB, + protected worldStateDB: WorldStateDB, phase: PublicKernelType = PublicKernelType.APP_LOGIC, ) { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); @@ -37,7 +36,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { // TODO(@spalladino): Should we allow emitting contracts in the fee preparation phase? // TODO(#6464): Should we allow emitting contracts in the private setup phase? // if so, this should only add contracts that were deployed during private app logic. - await this.publicContractsDB.addNewContracts(tx); + await this.worldStateDB.addNewContracts(tx); const { publicProvingInformation, kernelOutput, @@ -49,7 +48,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { } = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousCircuit).catch( // if we throw for any reason other than simulation, we need to rollback and drop the TX async err => { - await this.publicStateDB.rollbackToCommit(); + await this.worldStateDB.rollbackToCommit(); throw err; }, ); @@ -57,8 +56,8 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { if (revertReason) { // TODO(#6464): Should we allow emitting contracts in the private setup phase? // if so, this is removing contracts deployed in private setup - await this.publicContractsDB.removeNewContracts(tx); - await this.publicStateDB.rollbackToCheckpoint(); + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCheckpoint(); tx.filterRevertedLogs(kernelOutput); } else { tx.unencryptedLogs.addFunctionLogs(newUnencryptedLogs); diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index f3a8a13ebce..52f664df436 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -9,11 +9,10 @@ import { AvmContext } from '../avm/avm_context.js'; import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; import { AvmMachineState } from '../avm/avm_machine_state.js'; import { AvmSimulator } from '../avm/avm_simulator.js'; -import { HostStorage } from '../avm/journal/host_storage.js'; import { AvmPersistableStateManager } from '../avm/journal/index.js'; -import { type CommitmentsDB, type PublicContractsDB, type PublicStateDB } from './db_interfaces.js'; import { type PublicExecutionResult } from './execution.js'; import { ExecutorMetrics } from './executor_metrics.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { PublicSideEffectTrace } from './side_effect_trace.js'; /** @@ -22,13 +21,7 @@ import { PublicSideEffectTrace } from './side_effect_trace.js'; export class PublicExecutor { metrics: ExecutorMetrics; - constructor( - private readonly publicStorageDB: PublicStateDB, - private readonly contractsDb: PublicContractsDB, - private readonly commitmentsDb: CommitmentsDB, - private readonly header: Header, - client: TelemetryClient, - ) { + constructor(private readonly worldStateDB: WorldStateDB, private readonly header: Header, client: TelemetryClient) { this.metrics = new ExecutorMetrics(client, 'PublicExecutor'); } @@ -56,15 +49,14 @@ export class PublicExecutor { ): Promise { const address = 
executionRequest.contractAddress; const selector = executionRequest.callContext.functionSelector; - const fnName = (await this.contractsDb.getDebugFunctionName(address, selector)) ?? `${address}:${selector}`; + const fnName = (await this.worldStateDB.getDebugFunctionName(address, selector)) ?? `${address}:${selector}`; PublicExecutor.log.verbose(`[AVM] Executing public external function ${fnName}.`); const timer = new Timer(); - const hostStorage = new HostStorage(this.publicStorageDB, this.contractsDb, this.commitmentsDb); const trace = new PublicSideEffectTrace(startSideEffectCounter); const avmPersistableState = AvmPersistableStateManager.newWithPendingSiloedNullifiers( - hostStorage, + this.worldStateDB, trace, pendingSiloedNullifiers.map(n => n.value), ); diff --git a/yarn-project/simulator/src/public/phase_manager_factory.ts b/yarn-project/simulator/src/public/phase_manager_factory.ts index 5a71ff74863..7c42be49b9b 100644 --- a/yarn-project/simulator/src/public/phase_manager_factory.ts +++ b/yarn-project/simulator/src/public/phase_manager_factory.ts @@ -1,11 +1,11 @@ import { PublicKernelType, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; -import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; +import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { type AbstractPhaseManager } from './abstract_phase_manager.js'; import { AppLogicPhaseManager } from './app_logic_phase_manager.js'; -import { type ContractsDataSourcePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { SetupPhaseManager } from './setup_phase_manager.js'; import { TailPhaseManager } from './tail_phase_manager.js'; @@ -31,20 +31,11 @@ export class PhaseManagerFactory { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - publicContractsDB: ContractsDataSourcePublicDB, - publicStateDB: PublicStateDB, + worldStateDB: WorldStateDB, ): AbstractPhaseManager | undefined { const data = tx.data.forPublic!; if (data.needsSetup) { - return new SetupPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - publicContractsDB, - publicStateDB, - ); + return new SetupPhaseManager(db, publicExecutor, publicKernel, globalVariables, historicalHeader, worldStateDB); } else if (data.needsAppLogic) { return new AppLogicPhaseManager( db, @@ -52,8 +43,7 @@ export class PhaseManagerFactory { publicKernel, globalVariables, historicalHeader, - publicContractsDB, - publicStateDB, + worldStateDB, ); } else if (data.needsTeardown) { return new TeardownPhaseManager( @@ -62,8 +52,7 @@ export class PhaseManagerFactory { publicKernel, globalVariables, historicalHeader, - publicContractsDB, - publicStateDB, + worldStateDB, ); } else { return undefined; @@ -78,8 +67,7 @@ export class PhaseManagerFactory { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - publicContractsDB: ContractsDataSourcePublicDB, - publicStateDB: PublicStateDB, + worldStateDB: WorldStateDB, ): AbstractPhaseManager | undefined { if (output.needsSetup) { throw new CannotTransitionToSetupError(); @@ -93,8 +81,7 @@ export class PhaseManagerFactory { publicKernel, globalVariables, historicalHeader, - 
publicContractsDB, - publicStateDB, + worldStateDB, ); } else if (output.needsTeardown) { if (currentPhaseManager.phase === PublicKernelType.TEARDOWN) { @@ -106,19 +93,10 @@ export class PhaseManagerFactory { publicKernel, globalVariables, historicalHeader, - publicContractsDB, - publicStateDB, + worldStateDB, ); } else if (currentPhaseManager.phase !== PublicKernelType.TAIL) { - return new TailPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - publicContractsDB, - publicStateDB, - ); + return new TailPhaseManager(db, publicExecutor, publicKernel, globalVariables, historicalHeader, worldStateDB); } else { return undefined; } diff --git a/yarn-project/simulator/src/public/public_db_sources.test.ts b/yarn-project/simulator/src/public/public_db_sources.test.ts index d3a66358d1d..02d8355ecb1 100644 --- a/yarn-project/simulator/src/public/public_db_sources.test.ts +++ b/yarn-project/simulator/src/public/public_db_sources.test.ts @@ -1,22 +1,27 @@ +// TODO(md): fix this test import { MerkleTreeId } from '@aztec/circuit-types'; import { AztecAddress, Fr, PublicDataTreeLeafPreimage } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { WorldStatePublicDB } from './public_db_sources.js'; +import { WorldStateDB } from './public_db_sources.js'; const DB_VALUES_SIZE = 10; describe('world_state_public_db', () => { let db: MockProxy; + const contractDataSource: MockProxy = mock(); let dbStorage: Map>; let addresses: AztecAddress[]; let slots: Fr[]; let dbValues: Fr[]; + let worldStateDB: WorldStateDB; + beforeEach(() => { addresses = Array(DB_VALUES_SIZE).fill(0).map(AztecAddress.random); slots = Array(DB_VALUES_SIZE).fill(0).map(Fr.random); @@ -68,153 +73,147 @@ describe('world_state_public_db', () => { return Promise.resolve(PublicDataTreeLeafPreimage.fromBuffer(tree.get(index)!)); }); + + worldStateDB = new WorldStateDB(db, contractDataSource); }); it('reads unwritten value from merkle tree db', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); - expect(await publicStateDb.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); }); it('reads uncommitted value back', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // should read back the uncommitted value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); // other slots should be unchanged - expect(await publicStateDb.storageRead(addresses[1], 
slots[1])).toEqual(dbValues[1]); + expect(await worldStateDB.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); }); it('reads committed value back', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // commit the data - await publicStateDb.commit(); + await worldStateDB.commit(); // should read back the committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); // other slots should be unchanged - expect(await publicStateDb.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); + expect(await worldStateDB.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); }); it('will not rollback a committed value', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // commit the data - await publicStateDb.commit(); + await worldStateDB.commit(); // should read back the committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); - await publicStateDb.rollbackToCommit(); + await worldStateDB.rollbackToCommit(); // should still read back the committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); }); it('reads original value if rolled back uncommitted value', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // should read back the uncommitted value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); // now rollback - await publicStateDb.rollbackToCommit(); + await worldStateDB.rollbackToCommit(); // should now read the original value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); }); it('reads newly uncommitted value back', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await 
worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // commit the data - await publicStateDb.commit(); + await worldStateDB.commit(); // should read back the committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); // other slots should be unchanged - expect(await publicStateDb.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); + expect(await worldStateDB.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); // now update the slot again const newValue2 = new Fr(dbValues[0].toBigInt() + 2n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue2); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue2); // should read back the uncommitted value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue2); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue2); }); it('rolls back to previously committed value', async function () { - const publicStateDb = new WorldStatePublicDB(db); - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(dbValues[0]); const newValue = new Fr(dbValues[0].toBigInt() + 1n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue); // commit the data - await publicStateDb.commit(); + await worldStateDB.commit(); // should read back the committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); // other slots should be unchanged - expect(await publicStateDb.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); + expect(await worldStateDB.storageRead(addresses[1], slots[1])).toEqual(dbValues[1]); // now update the slot again const newValue2 = new Fr(dbValues[0].toBigInt() + 2n); // write a new value to our first value - await publicStateDb.storageWrite(addresses[0], slots[0], newValue2); + await worldStateDB.storageWrite(addresses[0], slots[0], newValue2); // should read back the uncommitted value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue2); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue2); // rollback - await publicStateDb.rollbackToCommit(); + await worldStateDB.rollbackToCommit(); // should read back the previously committed value - expect(await publicStateDb.storageRead(addresses[0], slots[0])).toEqual(newValue); + expect(await worldStateDB.storageRead(addresses[0], slots[0])).toEqual(newValue); }); it('can use checkpoints', async function () { - const publicStateDb = new WorldStatePublicDB(db); - const read = () => publicStateDb.storageRead(addresses[0], slots[0]); - const write = (value: Fr) => publicStateDb.storageWrite(addresses[0], slots[0], value); + const read = () => worldStateDB.storageRead(addresses[0], slots[0]); + const write = (value: Fr) => worldStateDB.storageWrite(addresses[0], slots[0], value); const newValue = new 
Fr(dbValues[0].toBigInt() + 1n); const newValue2 = new Fr(dbValues[0].toBigInt() + 2n); @@ -226,32 +225,32 @@ describe('world_state_public_db', () => { // basic expect(await read()).toEqual(dbValues[0]); await write(newValue); - await publicStateDb.checkpoint(); + await worldStateDB.checkpoint(); await write(newValue2); - await publicStateDb.rollbackToCheckpoint(); + await worldStateDB.rollbackToCheckpoint(); expect(await read()).toEqual(newValue); - await publicStateDb.rollbackToCommit(); + await worldStateDB.rollbackToCommit(); expect(await read()).toEqual(dbValues[0]); // write, checkpoint, commit, rollback to checkpoint, rollback to commit await write(newValue3); - await publicStateDb.checkpoint(); - await publicStateDb.rollbackToCheckpoint(); + await worldStateDB.checkpoint(); + await worldStateDB.rollbackToCheckpoint(); expect(await read()).toEqual(newValue3); - await publicStateDb.commit(); - await publicStateDb.rollbackToCommit(); + await worldStateDB.commit(); + await worldStateDB.rollbackToCommit(); expect(await read()).toEqual(newValue3); // writes after checkpoint take precedence await write(newValue4); - await publicStateDb.checkpoint(); + await worldStateDB.checkpoint(); await write(newValue5); - await publicStateDb.commit(); + await worldStateDB.commit(); expect(await read()).toEqual(newValue5); // rollback to checkpoint does not cross commit boundaries await write(newValue6); - await publicStateDb.rollbackToCheckpoint(); + await worldStateDB.rollbackToCheckpoint(); expect(await read()).toEqual(newValue5); }); }); diff --git a/yarn-project/simulator/src/public/public_db_sources.ts b/yarn-project/simulator/src/public/public_db_sources.ts index 662ccae10c3..949daffe240 100644 --- a/yarn-project/simulator/src/public/public_db_sources.ts +++ b/yarn-project/simulator/src/public/public_db_sources.ts @@ -38,7 +38,7 @@ export class ContractsDataSourcePublicDB implements PublicContractsDB { private log = createDebugLogger('aztec:sequencer:contracts-data-source'); - constructor(private db: ContractDataSource) {} + constructor(private dataSource: ContractDataSource) {} /** * Add new contracts from a transaction @@ -78,11 +78,11 @@ export class ContractsDataSourcePublicDB implements PublicContractsDB { } public async getContractInstance(address: AztecAddress): Promise { - return this.instanceCache.get(address.toString()) ?? (await this.db.getContract(address)); + return this.instanceCache.get(address.toString()) ?? (await this.dataSource.getContract(address)); } public async getContractClass(contractClassId: Fr): Promise { - return this.classCache.get(contractClassId.toString()) ?? (await this.db.getContractClass(contractClassId)); + return this.classCache.get(contractClassId.toString()) ?? (await this.dataSource.getContractClass(contractClassId)); } async getBytecode(address: AztecAddress, selector: FunctionSelector): Promise { @@ -98,7 +98,7 @@ export class ContractsDataSourcePublicDB implements PublicContractsDB { } public async getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise { - const artifact = await this.db.getContractArtifact(address); + const artifact = await this.dataSource.getContractArtifact(address); if (!artifact) { return Promise.resolve(undefined); } @@ -115,14 +115,18 @@ export class ContractsDataSourcePublicDB implements PublicContractsDB { } /** - * Implements the PublicStateDB using a world-state database. + * A public state DB that reads and writes to the world state. 
*/ -export class WorldStatePublicDB implements PublicStateDB { +export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicStateDB, CommitmentsDB { + private logger = createDebugLogger('aztec:sequencer:world-state-db'); + private committedWriteCache: Map = new Map(); private checkpointedWriteCache: Map = new Map(); private uncommittedWriteCache: Map = new Map(); - constructor(private db: MerkleTreeOperations) {} + constructor(private db: MerkleTreeOperations, dataSource: ContractDataSource) { + super(dataSource); + } /** * Reads a value from public storage, returning zero if none. @@ -171,53 +175,6 @@ export class WorldStatePublicDB implements PublicStateDB { return Promise.resolve(index); } - /** - * Commit the pending changes to the DB. - * @returns Nothing. - */ - commit(): Promise { - for (const [k, v] of this.checkpointedWriteCache) { - this.committedWriteCache.set(k, v); - } - // uncommitted writes take precedence over checkpointed writes - // since they are the most recent - for (const [k, v] of this.uncommittedWriteCache) { - this.committedWriteCache.set(k, v); - } - return this.rollbackToCommit(); - } - - /** - * Rollback the pending changes. - * @returns Nothing. - */ - async rollbackToCommit(): Promise { - await this.rollbackToCheckpoint(); - this.checkpointedWriteCache = new Map(); - return Promise.resolve(); - } - - checkpoint(): Promise { - for (const [k, v] of this.uncommittedWriteCache) { - this.checkpointedWriteCache.set(k, v); - } - return this.rollbackToCheckpoint(); - } - - rollbackToCheckpoint(): Promise { - this.uncommittedWriteCache = new Map(); - return Promise.resolve(); - } -} - -/** - * Implements WorldState db using a world state database. - */ -export class WorldStateDB implements CommitmentsDB { - private log = createDebugLogger('aztec:sequencer:world-state-db'); - - constructor(private db: MerkleTreeOperations) {} - public async getNullifierMembershipWitnessAtLatestBlock( nullifier: Fr, ): Promise { @@ -239,7 +196,7 @@ export class WorldStateDB implements CommitmentsDB { return undefined; } - this.log.debug(`[DB] Fetched nullifier membership`, { + this.logger.debug(`[DB] Fetched nullifier membership`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-nullifier-membership-witness-at-latest-block', @@ -277,7 +234,7 @@ export class WorldStateDB implements CommitmentsDB { messageIndex, ); - this.log.debug(`[DB] Fetched L1 to L2 message membership`, { + this.logger.debug(`[DB] Fetched L1 to L2 message membership`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-l1-to-l2-message-membership-witness', @@ -289,7 +246,7 @@ export class WorldStateDB implements CommitmentsDB { public async getL1ToL2LeafValue(leafIndex: bigint): Promise { const timer = new Timer(); const leafValue = await this.db.getLeafValue(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, leafIndex); - this.log.debug(`[DB] Fetched L1 to L2 message leaf value`, { + this.logger.debug(`[DB] Fetched L1 to L2 message leaf value`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-l1-to-l2-message-leaf-value', @@ -300,7 +257,7 @@ export class WorldStateDB implements CommitmentsDB { public async getCommitmentIndex(commitment: Fr): Promise { const timer = new Timer(); const index = await this.db.findLeafIndex(MerkleTreeId.NOTE_HASH_TREE, commitment); - this.log.debug(`[DB] Fetched commitment index`, { + this.logger.debug(`[DB] Fetched commitment index`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-commitment-index', 
@@ -311,7 +268,7 @@ export class WorldStateDB implements CommitmentsDB { public async getCommitmentValue(leafIndex: bigint): Promise { const timer = new Timer(); const leafValue = await this.db.getLeafValue(MerkleTreeId.NOTE_HASH_TREE, leafIndex); - this.log.debug(`[DB] Fetched commitment leaf value`, { + this.logger.debug(`[DB] Fetched commitment leaf value`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-commitment-leaf-value', @@ -322,11 +279,49 @@ export class WorldStateDB implements CommitmentsDB { public async getNullifierIndex(nullifier: Fr): Promise { const timer = new Timer(); const index = await this.db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer()); - this.log.debug(`[DB] Fetched nullifier index`, { + this.logger.debug(`[DB] Fetched nullifier index`, { eventName: 'public-db-access', duration: timer.ms(), operation: 'get-nullifier-index', } satisfies PublicDBAccessStats); return index; } + + /** + * Commit the pending changes to the DB. + * @returns Nothing. + */ + commit(): Promise { + for (const [k, v] of this.checkpointedWriteCache) { + this.committedWriteCache.set(k, v); + } + // uncommitted writes take precedence over checkpointed writes + // since they are the most recent + for (const [k, v] of this.uncommittedWriteCache) { + this.committedWriteCache.set(k, v); + } + return this.rollbackToCommit(); + } + + /** + * Rollback the pending changes. + * @returns Nothing. + */ + async rollbackToCommit(): Promise { + await this.rollbackToCheckpoint(); + this.checkpointedWriteCache = new Map(); + return Promise.resolve(); + } + + checkpoint(): Promise { + for (const [k, v] of this.uncommittedWriteCache) { + this.checkpointedWriteCache.set(k, v); + } + return this.rollbackToCheckpoint(); + } + + rollbackToCheckpoint(): Promise { + this.uncommittedWriteCache = new Map(); + return Promise.resolve(); + } } diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 7778147a3ef..5558997ab9c 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -50,7 +50,7 @@ import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; -import { type ContractsDataSourcePublicDB, type WorldStatePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { RealPublicKernelCircuitSimulator } from './public_kernel.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { PublicProcessor } from './public_processor.js'; @@ -58,8 +58,7 @@ import { PublicProcessor } from './public_processor.js'; describe('public_processor', () => { let db: MockProxy; let publicExecutor: MockProxy; - let publicContractsDB: MockProxy; - let publicWorldStateDB: MockProxy; + let worldStateDB: MockProxy; let prover: MockProxy; let proof: ClientIvcProof; @@ -70,15 +69,14 @@ describe('public_processor', () => { beforeEach(() => { db = mock(); publicExecutor = mock(); - publicContractsDB = mock(); - publicWorldStateDB = mock(); + worldStateDB = mock(); prover = mock(); proof = ClientIvcProof.empty(); root = Buffer.alloc(32, 5); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); - publicWorldStateDB.storageRead.mockResolvedValue(Fr.ZERO); + worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); }); describe('with mock 
circuits', () => { @@ -92,8 +90,7 @@ describe('public_processor', () => { publicKernel, GlobalVariables.empty(), Header.empty(), - publicContractsDB, - publicWorldStateDB, + worldStateDB, new NoopTelemetryClient(), ); }); @@ -138,8 +135,8 @@ describe('public_processor', () => { expect(processed).toEqual([]); expect(failed[0].tx).toEqual(tx); expect(failed[0].error).toEqual(new SimulationError(`Failed`, [])); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); expect(prover.addNewTx).toHaveBeenCalledTimes(0); }); }); @@ -191,8 +188,7 @@ describe('public_processor', () => { publicKernel, GlobalVariables.from({ ...GlobalVariables.empty(), gasFees: GasFees.default() }), header, - publicContractsDB, - publicWorldStateDB, + worldStateDB, new NoopTelemetryClient(), ); }); @@ -211,8 +207,8 @@ describe('public_processor', () => { expect(processed[0].hash).toEqual(tx.getTxHash()); expect(processed[0].clientIvcProof).toEqual(proof); expect(publicExecutor.simulate).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); // we keep the logs expect(processed[0].encryptedLogs.getTotalLogCount()).toBe(6); @@ -245,10 +241,10 @@ describe('public_processor', () => { expect(failed).toHaveLength(0); expect(publicExecutor.simulate).toHaveBeenCalledTimes(1); // we only call checkpoint after successful "setup" - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); }); @@ -268,8 +264,8 @@ describe('public_processor', () => { expect(processed[1].clientIvcProof).toEqual(proof); expect(failed).toHaveLength(0); expect(publicExecutor.simulate).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(2); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); expect(prover.addNewTx).toHaveBeenCalledWith(processed[1]); @@ -383,10 +379,10 @@ describe('public_processor', () => { expect(appLogicSpy).toHaveBeenCalledTimes(2); expect(teardownSpy).toHaveBeenCalledTimes(2); expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); + 
expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); const txEffect = toTxEffect(processed[0], GasFees.default()); const numPublicDataWrites = 5; @@ -490,10 +486,10 @@ describe('public_processor', () => { expect(teardownSpy).toHaveBeenCalledTimes(0); expect(publicExecutor.simulate).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); expect(prover.addNewTx).toHaveBeenCalledTimes(0); }); @@ -587,10 +583,10 @@ describe('public_processor', () => { expect(appLogicSpy).toHaveBeenCalledTimes(1); expect(teardownSpy).toHaveBeenCalledTimes(2); expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); const txEffect = toTxEffect(processed[0], GasFees.default()); const numPublicDataWrites = 3; @@ -703,10 +699,10 @@ describe('public_processor', () => { expect(appLogicSpy).toHaveBeenCalledTimes(1); expect(teardownSpy).toHaveBeenCalledTimes(2); expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(2); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); const txEffect = toTxEffect(processed[0], GasFees.default()); const numPublicDataWrites = 3; @@ -873,10 +869,10 @@ describe('public_processor', () => { expect(publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); expect(publicExecutor.simulate).toHaveBeenNthCalledWith(3, ...expectedSimulateCall(teardownGas, expectedTxFee)); - expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); 
expect(processed[0].gasUsed[PublicKernelType.SETUP]).toEqual(setupGasUsed); @@ -988,8 +984,8 @@ describe('public_processor', () => { inclusionFee: new Fr(inclusionFee), }); - publicWorldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); - publicWorldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => + worldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); + worldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()), ); @@ -998,9 +994,9 @@ describe('public_processor', () => { expect(failed.map(f => f.error)).toEqual([]); expect(processed).toHaveLength(1); expect(publicExecutor.simulate).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.storageWrite).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.storageWrite).toHaveBeenCalledTimes(1); expect(processed[0].data.feePayer).toEqual(feePayer); expect(processed[0].finalPublicDataUpdateRequests[MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]).toEqual( PublicDataUpdateRequest.from({ @@ -1028,8 +1024,8 @@ describe('public_processor', () => { inclusionFee: new Fr(inclusionFee), }); - publicWorldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); - publicWorldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => + worldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); + worldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()), ); @@ -1040,9 +1036,9 @@ describe('public_processor', () => { expect(processed[0].hash).toEqual(tx.getTxHash()); expect(processed[0].clientIvcProof).toEqual(proof); expect(publicExecutor.simulate).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.storageWrite).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.storageWrite).toHaveBeenCalledTimes(1); expect(processed[0].data.feePayer).toEqual(feePayer); expect(processed[0].finalPublicDataUpdateRequests[MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]).toEqual( PublicDataUpdateRequest.from({ @@ -1070,8 +1066,8 @@ describe('public_processor', () => { inclusionFee: new Fr(inclusionFee), }); - publicWorldStateDB.storageRead.mockResolvedValue(Fr.ZERO); - publicWorldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => + worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); + worldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()), ); @@ -1088,9 +1084,9 @@ describe('public_processor', () => { expect(processed[0].hash).toEqual(tx.getTxHash()); expect(processed[0].clientIvcProof).toEqual(proof); expect(publicExecutor.simulate).toHaveBeenCalledTimes(2); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.storageWrite).toHaveBeenCalledTimes(1); + 
expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.storageWrite).toHaveBeenCalledTimes(1); expect(processed[0].data.feePayer).toEqual(feePayer); expect(processed[0].finalPublicDataUpdateRequests[0]).toEqual( PublicDataUpdateRequest.from({ @@ -1118,8 +1114,8 @@ describe('public_processor', () => { inclusionFee: new Fr(inclusionFee), }); - publicWorldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); - publicWorldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => + worldStateDB.storageRead.mockResolvedValue(new Fr(initialBalance)); + worldStateDB.storageWrite.mockImplementation((address: AztecAddress, slot: Fr) => Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()), ); @@ -1129,9 +1125,9 @@ describe('public_processor', () => { expect(failed).toHaveLength(1); expect(failed[0].error.message).toMatch(/Not enough balance/i); expect(publicExecutor.simulate).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - expect(publicWorldStateDB.storageWrite).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(worldStateDB.storageWrite).toHaveBeenCalledTimes(0); }); }); }); diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 84cffb190f0..6066177ff43 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -29,7 +29,6 @@ import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer'; import { PublicExecutor, - type PublicStateDB, type SimulationProvider, computeFeePayerBalanceLeafSlot, computeFeePayerBalanceStorageSlot, @@ -40,7 +39,7 @@ import { type MerkleTreeOperations } from '@aztec/world-state'; import { type AbstractPhaseManager } from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; -import { ContractsDataSourcePublicDB, WorldStateDB, WorldStatePublicDB } from './public_db_sources.js'; +import { WorldStateDB } from './public_db_sources.js'; import { RealPublicKernelCircuitSimulator } from './public_kernel.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { PublicProcessorMetrics } from './public_processor_metrics.js'; @@ -65,25 +64,16 @@ export class PublicProcessorFactory { public create(maybeHistoricalHeader: Header | undefined, globalVariables: GlobalVariables): PublicProcessor { const { merkleTree, telemetryClient } = this; const historicalHeader = maybeHistoricalHeader ?? 
merkleTree.getInitialHeader(); - const publicContractsDB = new ContractsDataSourcePublicDB(this.contractDataSource); - const worldStatePublicDB = new WorldStatePublicDB(merkleTree); - const worldStateDB = new WorldStateDB(merkleTree); - const publicExecutor = new PublicExecutor( - worldStatePublicDB, - publicContractsDB, - worldStateDB, - historicalHeader, - telemetryClient, - ); + const worldStateDB = new WorldStateDB(merkleTree, this.contractDataSource); + const publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); return new PublicProcessor( merkleTree, publicExecutor, new RealPublicKernelCircuitSimulator(this.simulator), globalVariables, historicalHeader, - publicContractsDB, - worldStatePublicDB, + worldStateDB, this.telemetryClient, ); } @@ -101,8 +91,7 @@ export class PublicProcessor { protected publicKernel: PublicKernelCircuitSimulator, protected globalVariables: GlobalVariables, protected historicalHeader: Header, - protected publicContractsDB: ContractsDataSourcePublicDB, - protected publicStateDB: PublicStateDB, + protected worldStateDB: WorldStateDB, telemetryClient: TelemetryClient, private log = createDebugLogger('aztec:sequencer:public-processor'), ) { @@ -121,6 +110,7 @@ export class PublicProcessor { */ public async process( txs: Tx[], + // NOTE(MD): max transactions is redundant, it should be filtered beforehand maxTransactions = txs.length, processedTxHandler?: ProcessedTxHandler, txValidator?: TxValidator, @@ -128,6 +118,8 @@ export class PublicProcessor { // The processor modifies the tx objects in place, so we need to clone them. txs = txs.map(tx => Tx.clone(tx)); const result: ProcessedTx[] = []; + + // NOTE(md): isnt the point that no txs should fail? const failed: FailedTx[] = []; let returns: NestedProcessReturnValues[] = []; @@ -151,7 +143,7 @@ export class PublicProcessor { processedTx.finalPublicDataUpdateRequests = await this.createFinalDataUpdateRequests(processedTx); // Commit the state updates from this transaction - await this.publicStateDB.commit(); + await this.worldStateDB.commit(); validateProcessedTx(processedTx); // Re-validate the transaction @@ -217,14 +209,14 @@ export class PublicProcessor { const balance = existingBalanceWriteIndex > -1 ? finalPublicDataUpdateRequests[existingBalanceWriteIndex].newValue - : await this.publicStateDB.storageRead(feeJuiceAddress, balanceSlot); + : await this.worldStateDB.storageRead(feeJuiceAddress, balanceSlot); if (balance.lt(txFee)) { throw new Error(`Not enough balance for fee payer to pay for transaction (got ${balance} needs ${txFee})`); } const updatedBalance = balance.sub(txFee); - await this.publicStateDB.storageWrite(feeJuiceAddress, balanceSlot, updatedBalance); + await this.worldStateDB.storageWrite(feeJuiceAddress, balanceSlot, updatedBalance); finalPublicDataUpdateRequests[ existingBalanceWriteIndex > -1 ? 
existingBalanceWriteIndex : MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX @@ -247,10 +239,10 @@ export class PublicProcessor { this.publicKernel, this.globalVariables, this.historicalHeader, - this.publicContractsDB, - this.publicStateDB, + this.worldStateDB, ); this.log.debug(`Beginning processing in phase ${phase?.phase} for tx ${tx.getTxHash()}`); + let publicKernelPublicInput = tx.data.toPublicKernelCircuitPublicInputs(); let lastKernelArtifact: ProtocolArtifact = 'PrivateKernelTailToPublicArtifact'; // All txs with public calls must carry tail to public proofs let finalKernelOutput: KernelCircuitPublicInputs | undefined; @@ -285,8 +277,7 @@ export class PublicProcessor { this.publicKernel, this.globalVariables, this.historicalHeader, - this.publicContractsDB, - this.publicStateDB, + this.worldStateDB, ); } diff --git a/yarn-project/simulator/src/public/setup_phase_manager.test.ts b/yarn-project/simulator/src/public/setup_phase_manager.test.ts index a765c59e984..58333097a03 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.test.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.test.ts @@ -6,7 +6,7 @@ import { type MerkleTreeOperations } from '@aztec/world-state'; import { it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { type ContractsDataSourcePublicDB, type WorldStatePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { SetupPhaseManager } from './setup_phase_manager.js'; @@ -19,8 +19,7 @@ class TestSetupPhaseManager extends SetupPhaseManager { describe('setup_phase_manager', () => { let db: MockProxy; let publicExecutor: MockProxy; - let publicContractsDB: MockProxy; - let publicWorldStateDB: MockProxy; + let worldStateDB: MockProxy; let publicKernel: MockProxy; let root: Buffer; @@ -30,8 +29,7 @@ describe('setup_phase_manager', () => { beforeEach(() => { db = mock(); publicExecutor = mock(); - publicContractsDB = mock(); - publicWorldStateDB = mock(); + worldStateDB = mock(); root = Buffer.alloc(32, 5); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); @@ -42,8 +40,7 @@ describe('setup_phase_manager', () => { publicKernel, GlobalVariables.empty(), Header.empty(), - publicContractsDB, - publicWorldStateDB, + worldStateDB, ); }); diff --git a/yarn-project/simulator/src/public/setup_phase_manager.ts b/yarn-project/simulator/src/public/setup_phase_manager.ts index 2e40f0ffeb0..4fd1d02986b 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.ts @@ -1,11 +1,11 @@ import { PublicKernelType, type PublicProvingRequest, type Tx } from '@aztec/circuit-types'; import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; +import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type ContractsDataSourcePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; /** @@ -18,8 +18,7 @@ export class 
SetupPhaseManager extends AbstractPhaseManager { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - protected publicContractsDB: ContractsDataSourcePublicDB, - protected publicStateDB: PublicStateDB, + protected worldStateDB: WorldStateDB, phase: PublicKernelType = PublicKernelType.SETUP, ) { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); @@ -33,17 +32,17 @@ export class SetupPhaseManager extends AbstractPhaseManager { this.log.verbose(`Processing tx ${tx.getTxHash()}`); // TODO(#6464): Should we allow emitting contracts in the private setup phase? // if so, this should only add contracts that were deployed during private app logic. - await this.publicContractsDB.addNewContracts(tx); + await this.worldStateDB.addNewContracts(tx); const { publicProvingInformation, kernelOutput, lastKernelArtifact, newUnencryptedLogs, revertReason, gasUsed } = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousCircuit).catch( // the abstract phase manager throws if simulation gives error in a non-revertible phase async err => { - await this.publicStateDB.rollbackToCommit(); + await this.worldStateDB.rollbackToCommit(); throw err; }, ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedLogs); - await this.publicStateDB.checkpoint(); + await this.worldStateDB.checkpoint(); // Return a list of setup proving requests const publicProvingRequests: PublicProvingRequest[] = publicProvingInformation.map(info => { diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index 08b0916d30c..19de912dd19 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -10,11 +10,11 @@ import { mergeAccumulatedData, } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; +import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { AbstractPhaseManager } from './abstract_phase_manager.js'; -import { type ContractsDataSourcePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; export class TailPhaseManager extends AbstractPhaseManager { @@ -24,8 +24,7 @@ export class TailPhaseManager extends AbstractPhaseManager { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - protected publicContractsDB: ContractsDataSourcePublicDB, - protected publicStateDB: PublicStateDB, + protected worldStateDB: WorldStateDB, phase: PublicKernelType = PublicKernelType.TAIL, ) { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); @@ -40,7 +39,7 @@ export class TailPhaseManager extends AbstractPhaseManager { const [inputs, finalKernelOutput] = await this.simulate(previousPublicKernelOutput, previousKernelArtifact).catch( // the abstract phase manager throws if simulation gives error in non-revertible phase async err => { - await this.publicStateDB.rollbackToCommit(); + await this.worldStateDB.rollbackToCommit(); throw err; }, ); diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index 
03401e33dbb..85ba32ebd8d 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -7,13 +7,13 @@ import { type PublicKernelCircuitPublicInputs, } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; +import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type ContractsDataSourcePublicDB } from './public_db_sources.js'; +import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; /** @@ -26,8 +26,7 @@ export class TeardownPhaseManager extends AbstractPhaseManager { publicKernel: PublicKernelCircuitSimulator, globalVariables: GlobalVariables, historicalHeader: Header, - protected publicContractsDB: ContractsDataSourcePublicDB, - protected publicStateDB: PublicStateDB, + protected worldStateDB: WorldStateDB, phase: PublicKernelType = PublicKernelType.TEARDOWN, ) { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); @@ -43,12 +42,12 @@ export class TeardownPhaseManager extends AbstractPhaseManager { await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousKernelArtifact).catch( // the abstract phase manager throws if simulation gives error in a non-revertible phase async err => { - await this.publicStateDB.rollbackToCommit(); + await this.worldStateDB.rollbackToCommit(); throw err; }, ); if (revertReason) { - await this.publicStateDB.rollbackToCheckpoint(); + await this.worldStateDB.rollbackToCheckpoint(); tx.filterRevertedLogs(kernelOutput); } else { // TODO(#6464): Should we allow emitting contracts in the public teardown phase? 
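[Illustrative aside, not part of any commit in this series] The changes above fold the old WorldStatePublicDB and the CommitmentsDB implementation into a single WorldStateDB with three layered write caches (uncommitted, checkpointed, committed). The TypeScript sketch below mirrors the behaviour exercised by the tests earlier in this diff; the merkleTree/dataSource instances, the address, the slot and the values are assumptions made purely for illustration, and the import paths simply follow the files touched in this patch.

import { AztecAddress, Fr } from '@aztec/circuits.js';
import { type ContractDataSource } from '@aztec/types/contracts';
import { type MerkleTreeOperations } from '@aztec/world-state';

import { WorldStateDB } from './public_db_sources.js';

async function worldStateDbCacheSketch(merkleTree: MerkleTreeOperations, dataSource: ContractDataSource) {
  // One object now serves contract data, commitments and public storage.
  const worldStateDB = new WorldStateDB(merkleTree, dataSource);

  const address = AztecAddress.random(); // illustrative address
  const slot = new Fr(1n); // illustrative storage slot

  await worldStateDB.storageWrite(address, slot, new Fr(10n)); // lands in the uncommitted cache
  await worldStateDB.checkpoint(); // promotes uncommitted writes to the checkpointed cache

  await worldStateDB.storageWrite(address, slot, new Fr(20n)); // a newer uncommitted write shadows the checkpoint
  await worldStateDB.rollbackToCheckpoint(); // drops the uncommitted write; reads see 10 again

  await worldStateDB.commit(); // checkpointed writes (then any uncommitted ones) become committed
  await worldStateDB.rollbackToCommit(); // clears both volatile caches; the committed value survives

  return worldStateDB.storageRead(address, slot); // resolves to Fr(10)
}
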
From 9ec4fb1d4c28ea21400900298b3a3f0318af0f7c Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 22:24:35 +0000 Subject: [PATCH 049/123] fix --- yarn-project/simulator/src/public/public_db_sources.test.ts | 1 - yarn-project/simulator/src/public/public_processor.ts | 3 --- 2 files changed, 4 deletions(-) diff --git a/yarn-project/simulator/src/public/public_db_sources.test.ts b/yarn-project/simulator/src/public/public_db_sources.test.ts index 02d8355ecb1..4880036b5a8 100644 --- a/yarn-project/simulator/src/public/public_db_sources.test.ts +++ b/yarn-project/simulator/src/public/public_db_sources.test.ts @@ -1,4 +1,3 @@ -// TODO(md): fix this test import { MerkleTreeId } from '@aztec/circuit-types'; import { AztecAddress, Fr, PublicDataTreeLeafPreimage } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 6066177ff43..0e36df968c1 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -110,7 +110,6 @@ export class PublicProcessor { */ public async process( txs: Tx[], - // NOTE(MD): max transactions is redundant, it should be filtered beforehand maxTransactions = txs.length, processedTxHandler?: ProcessedTxHandler, txValidator?: TxValidator, @@ -118,8 +117,6 @@ export class PublicProcessor { // The processor modifies the tx objects in place, so we need to clone them. txs = txs.map(tx => Tx.clone(tx)); const result: ProcessedTx[] = []; - - // NOTE(md): isnt the point that no txs should fail? const failed: FailedTx[] = []; let returns: NestedProcessReturnValues[] = []; From 6f35ee8b7ec15f83e8a8b749b55c858e0273bba4 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 22:38:51 +0000 Subject: [PATCH 050/123] fix --- yarn-project/aztec-node/src/aztec-node/server.ts | 2 +- yarn-project/bb-prover/src/avm_proving.test.ts | 12 +++++------- .../prover-client/src/mocks/test_context.ts | 15 +++++---------- .../src/tx_validator/tx_validator_factory.ts | 9 +++++---- yarn-project/txe/src/oracle/txe_oracle.ts | 7 ++----- 5 files changed, 18 insertions(+), 27 deletions(-) diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index be8d69c9af8..c3e50680f66 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -762,7 +762,7 @@ export class AztecNodeService implements AztecNode { const txValidators: TxValidator[] = [ new DataTxValidator(), new MetadataTxValidator(newGlobalVariables), - new DoubleSpendTxValidator(new WorldStateDB(this.worldStateSynchronizer.getLatest())), + new DoubleSpendTxValidator(new WorldStateDB(this.worldStateSynchronizer.getLatest(), this.contractDataSource)), ]; if (!isSimulation) { diff --git a/yarn-project/bb-prover/src/avm_proving.test.ts b/yarn-project/bb-prover/src/avm_proving.test.ts index ebf5af81ea5..1c88861ff53 100644 --- a/yarn-project/bb-prover/src/avm_proving.test.ts +++ b/yarn-project/bb-prover/src/avm_proving.test.ts @@ -33,12 +33,11 @@ import { computeVarArgsHash } from '@aztec/circuits.js/hash'; import { padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; -import { AvmSimulator, type 
PublicContractsDB, type PublicExecutionResult, type PublicStateDB } from '@aztec/simulator'; +import { AvmSimulator, type PublicContractsDB, type PublicExecutionResult, type WorldStateDB } from '@aztec/simulator'; import { getAvmTestContractBytecode, initContext, initExecutionEnvironment, - initHostStorage, initPersistableStateManager, resolveAvmTestContractAssertionMessage, } from '@aztec/simulator/avm/fixtures'; @@ -227,16 +226,15 @@ const proveAndVerifyAvmTestContract = async ( }).withAddress(environment.address); contractsDb.getContractInstance.mockResolvedValue(Promise.resolve(contractInstance)); - const storageDb = mock(); + const worldStateDB = mock(); const storageValue = new Fr(5); - storageDb.storageRead.mockResolvedValue(Promise.resolve(storageValue)); + worldStateDB.storageRead.mockResolvedValue(Promise.resolve(storageValue)); - const hostStorage = initHostStorage({ contractsDb }); const trace = new PublicSideEffectTrace(startSideEffectCounter); - const persistableState = initPersistableStateManager({ hostStorage, trace }); + const persistableState = initPersistableStateManager({ worldStateDB, trace }); const context = initContext({ env: environment, persistableState }); const nestedCallBytecode = getAvmTestContractBytecode('add_args_return'); - jest.spyOn(hostStorage.contractsDb, 'getBytecode').mockResolvedValue(nestedCallBytecode); + jest.spyOn(worldStateDB, 'getBytecode').mockResolvedValue(nestedCallBytecode); const startGas = new Gas(context.machineState.gasLeft.daGas, context.machineState.gasLeft.l2Gas); diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index e0d6e170685..f5d9b78a8ac 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -13,7 +13,6 @@ import { type Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { - type ContractsDataSourcePublicDB, type PublicExecutionResult, PublicExecutionResultBuilder, type PublicExecutor, @@ -21,7 +20,7 @@ import { RealPublicKernelCircuitSimulator, type SimulationProvider, WASMSimulator, - type WorldStatePublicDB, + type WorldStateDB, } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { MerkleTrees } from '@aztec/world-state'; @@ -41,8 +40,7 @@ import { getEnvironmentConfig, getSimulationProvider, makeGlobals } from './fixt export class TestContext { constructor( public publicExecutor: MockProxy, - public publicContractsDB: MockProxy, - public publicWorldStateDB: MockProxy, + public worldStateDB: MockProxy, public publicProcessor: PublicProcessor, public simulationProvider: SimulationProvider, public globalVariables: GlobalVariables, @@ -71,8 +69,7 @@ export class TestContext { const globalVariables = makeGlobals(blockNumber); const publicExecutor = mock(); - const publicContractsDB = mock(); - const publicWorldStateDB = mock(); + const worldStateDB = mock(); const publicKernel = new RealPublicKernelCircuitSimulator(new WASMSimulator()); const telemetry = new NoopTelemetryClient(); @@ -94,8 +91,7 @@ export class TestContext { publicKernel, GlobalVariables.empty(), Header.empty(), - publicContractsDB, - publicWorldStateDB, + worldStateDB, telemetry, ); @@ -130,8 +126,7 @@ export class TestContext { return new this( publicExecutor, - publicContractsDB, - publicWorldStateDB, + worldStateDB, processor, simulationProvider, globalVariables, 
diff --git a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts index 97f50cf4fae..6caf6ebb9ab 100644 --- a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts +++ b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts @@ -1,7 +1,7 @@ import { type AllowedElement, type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; import { type GlobalVariables } from '@aztec/circuits.js'; import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; -import { WorldStateDB, WorldStatePublicDB } from '@aztec/simulator'; +import { WorldStateDB } from '@aztec/simulator'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; @@ -20,16 +20,17 @@ export class TxValidatorFactory { ) {} validatorForNewTxs(globalVariables: GlobalVariables, setupAllowList: AllowedElement[]): TxValidator { + const worldStateDB = new WorldStateDB(this.merkleTreeDb, this.contractDataSource); return new AggregateTxValidator( new DataTxValidator(), new MetadataTxValidator(globalVariables), - new DoubleSpendTxValidator(new WorldStateDB(this.merkleTreeDb)), + new DoubleSpendTxValidator(worldStateDB), new PhasesTxValidator(this.contractDataSource, setupAllowList), - new GasTxValidator(new WorldStatePublicDB(this.merkleTreeDb), FeeJuiceAddress, this.enforceFees), + new GasTxValidator(worldStateDB, FeeJuiceAddress, this.enforceFees), ); } validatorForProcessedTxs(): TxValidator { - return new DoubleSpendTxValidator(new WorldStateDB(this.merkleTreeDb)); + return new DoubleSpendTxValidator(new WorldStateDB(this.merkleTreeDb, this.contractDataSource)); } } diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 93a8bcc59f2..cccf85cef4a 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -51,7 +51,6 @@ import { Timer } from '@aztec/foundation/timer'; import { type KeyStore } from '@aztec/key-store'; import { ContractDataOracle } from '@aztec/pxe'; import { - ContractsDataSourcePublicDB, ExecutionError, type ExecutionNoteCache, type MessageLoadOracleInputs, @@ -74,7 +73,6 @@ import { MerkleTreeSnapshotOperationsFacade, type MerkleTrees } from '@aztec/wor import { type TXEDatabase } from '../util/txe_database.js'; import { TXEPublicContractDataSource } from '../util/txe_public_contract_data_source.js'; -import { TXEPublicStateDB } from '../util/txe_public_state_db.js'; export class TXE implements TypedOracle { private blockNumber = 0; @@ -717,10 +715,9 @@ export class TXE implements TypedOracle { const header = Header.empty(); header.state = await this.trees.getStateReference(true); header.globalVariables.blockNumber = new Fr(await this.getBlockNumber()); + const executor = new PublicExecutor( - new TXEPublicStateDB(this), - new ContractsDataSourcePublicDB(new TXEPublicContractDataSource(this)), - new WorldStateDB(this.trees.asLatest()), + new WorldStateDB(this.trees.asLatest(), new TXEPublicContractDataSource(this)), header, new NoopTelemetryClient(), ); From 2e80f835652fd39d6ceeff0706923f7dc692f71a Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 16 Sep 2024 23:01:21 +0000 Subject: [PATCH 051/123] chore: make phase manager less noisy --- .../src/public/abstract_phase_manager.ts | 41 +++++++++--- .../src/public/app_logic_phase_manager.ts | 20 ++---- 
.../src/public/phase_manager_factory.ts | 65 +++---------------- .../simulator/src/public/public_processor.ts | 38 +++++------ .../src/public/setup_phase_manager.test.ts | 13 ++-- .../src/public/setup_phase_manager.ts | 20 ++---- .../src/public/tail_phase_manager.ts | 20 +----- .../src/public/teardown_phase_manager.ts | 26 ++------ 8 files changed, 82 insertions(+), 161 deletions(-) diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 054218ae5a2..5abd9635446 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -64,6 +64,7 @@ import { import { type PublicExecutionResult, type PublicExecutor, + type WorldStateDB, accumulateReturnValues, isPublicExecutionResult, } from '@aztec/simulator'; @@ -139,19 +140,39 @@ export type PhaseResult = { gasUsed?: Gas; }; +export interface PhaseConfig { + db: MerkleTreeOperations; + publicExecutor: PublicExecutor; + publicKernel: PublicKernelCircuitSimulator; + globalVariables: GlobalVariables; + historicalHeader: Header; + worldStateDB: WorldStateDB; + phase?: PublicKernelType; +} + export abstract class AbstractPhaseManager { protected hintsBuilder: HintsBuilder; protected log: DebugLogger; - constructor( - protected db: MerkleTreeOperations, - protected publicExecutor: PublicExecutor, - protected publicKernel: PublicKernelCircuitSimulator, - protected globalVariables: GlobalVariables, - protected historicalHeader: Header, - public phase: PublicKernelType, - ) { - this.hintsBuilder = new HintsBuilder(db); - this.log = createDebugLogger(`aztec:sequencer:${phase}`); + + protected db: MerkleTreeOperations; + protected publicExecutor: PublicExecutor; + protected publicKernel: PublicKernelCircuitSimulator; + protected globalVariables: GlobalVariables; + protected historicalHeader: Header; + protected worldStateDB: WorldStateDB; + public phase: PublicKernelType; + + constructor(config: PhaseConfig) { + this.db = config.db; + this.publicExecutor = config.publicExecutor; + this.publicKernel = config.publicKernel; + this.globalVariables = config.globalVariables; + this.historicalHeader = config.historicalHeader; + this.worldStateDB = config.worldStateDB; + this.phase = config.phase ?? 
PublicKernelType.SETUP; + + this.hintsBuilder = new HintsBuilder(this.db); + this.log = createDebugLogger(`aztec:sequencer:${this.phase}`); } /** diff --git a/yarn-project/simulator/src/public/app_logic_phase_manager.ts b/yarn-project/simulator/src/public/app_logic_phase_manager.ts index 5d78b366766..e6c0b1e92a4 100644 --- a/yarn-project/simulator/src/public/app_logic_phase_manager.ts +++ b/yarn-project/simulator/src/public/app_logic_phase_manager.ts @@ -1,27 +1,15 @@ import { PublicKernelType, type PublicProvingRequest, type Tx } from '@aztec/circuit-types'; -import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; +import { type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations } from '@aztec/world-state'; -import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type WorldStateDB } from './public_db_sources.js'; -import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; +import { AbstractPhaseManager, type PhaseConfig, makeAvmProvingRequest } from './abstract_phase_manager.js'; /** * The phase manager responsible for performing the fee preparation phase. */ export class AppLogicPhaseManager extends AbstractPhaseManager { - constructor( - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - historicalHeader: Header, - protected worldStateDB: WorldStateDB, - phase: PublicKernelType = PublicKernelType.APP_LOGIC, - ) { - super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); + constructor(config: PhaseConfig, public override phase: PublicKernelType = PublicKernelType.APP_LOGIC) { + super(config); } override async handle( diff --git a/yarn-project/simulator/src/public/phase_manager_factory.ts b/yarn-project/simulator/src/public/phase_manager_factory.ts index 7c42be49b9b..a5dbe868a53 100644 --- a/yarn-project/simulator/src/public/phase_manager_factory.ts +++ b/yarn-project/simulator/src/public/phase_manager_factory.ts @@ -1,12 +1,8 @@ import { PublicKernelType, type Tx } from '@aztec/circuit-types'; -import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; -import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations } from '@aztec/world-state'; +import { type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; -import { type AbstractPhaseManager } from './abstract_phase_manager.js'; +import { type AbstractPhaseManager, type PhaseConfig } from './abstract_phase_manager.js'; import { AppLogicPhaseManager } from './app_logic_phase_manager.js'; -import { type WorldStateDB } from './public_db_sources.js'; -import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { SetupPhaseManager } from './setup_phase_manager.js'; import { TailPhaseManager } from './tail_phase_manager.js'; import { TeardownPhaseManager } from './teardown_phase_manager.js'; @@ -24,36 +20,14 @@ export class CannotTransitionToSetupError extends Error { } export class PhaseManagerFactory { - public static phaseFromTx( - tx: Tx, - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - 
historicalHeader: Header, - worldStateDB: WorldStateDB, - ): AbstractPhaseManager | undefined { + public static phaseFromTx(tx: Tx, config: PhaseConfig): AbstractPhaseManager | undefined { const data = tx.data.forPublic!; if (data.needsSetup) { - return new SetupPhaseManager(db, publicExecutor, publicKernel, globalVariables, historicalHeader, worldStateDB); + return new SetupPhaseManager(config); } else if (data.needsAppLogic) { - return new AppLogicPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - worldStateDB, - ); + return new AppLogicPhaseManager(config); } else if (data.needsTeardown) { - return new TeardownPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - worldStateDB, - ); + return new TeardownPhaseManager(config); } else { return undefined; } @@ -62,12 +36,7 @@ export class PhaseManagerFactory { public static phaseFromOutput( output: PublicKernelCircuitPublicInputs, currentPhaseManager: AbstractPhaseManager, - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - historicalHeader: Header, - worldStateDB: WorldStateDB, + config: PhaseConfig, ): AbstractPhaseManager | undefined { if (output.needsSetup) { throw new CannotTransitionToSetupError(); @@ -75,28 +44,14 @@ export class PhaseManagerFactory { if (currentPhaseManager.phase === PublicKernelType.APP_LOGIC) { throw new PhaseDidNotChangeError(currentPhaseManager.phase); } - return new AppLogicPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - worldStateDB, - ); + return new AppLogicPhaseManager(config); } else if (output.needsTeardown) { if (currentPhaseManager.phase === PublicKernelType.TEARDOWN) { throw new PhaseDidNotChangeError(currentPhaseManager.phase); } - return new TeardownPhaseManager( - db, - publicExecutor, - publicKernel, - globalVariables, - historicalHeader, - worldStateDB, - ); + return new TeardownPhaseManager(config); } else if (currentPhaseManager.phase !== PublicKernelType.TAIL) { - return new TailPhaseManager(db, publicExecutor, publicKernel, globalVariables, historicalHeader, worldStateDB); + return new TailPhaseManager(config); } else { return undefined; } diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 0e36df968c1..67faee6463d 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -37,7 +37,7 @@ import { Attributes, type TelemetryClient, type Tracer, trackSpan } from '@aztec import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; -import { type AbstractPhaseManager } from './abstract_phase_manager.js'; +import { type AbstractPhaseManager, type PhaseConfig } from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; import { WorldStateDB } from './public_db_sources.js'; import { RealPublicKernelCircuitSimulator } from './public_kernel.js'; @@ -229,15 +229,16 @@ export class PublicProcessor { const timer = new Timer(); let returnValues: NestedProcessReturnValues[] = []; const publicProvingRequests: PublicProvingRequest[] = []; - let phase: AbstractPhaseManager | undefined = PhaseManagerFactory.phaseFromTx( - tx, - this.db, - this.publicExecutor, - this.publicKernel, - this.globalVariables, - this.historicalHeader, - 
this.worldStateDB, - ); + + const phaseManagerConfig: PhaseConfig = { + db: this.db, + publicExecutor: this.publicExecutor, + publicKernel: this.publicKernel, + globalVariables: this.globalVariables, + historicalHeader: this.historicalHeader, + worldStateDB: this.worldStateDB, + }; + let phase: AbstractPhaseManager | undefined = PhaseManagerFactory.phaseFromTx(tx, phaseManagerConfig); this.log.debug(`Beginning processing in phase ${phase?.phase} for tx ${tx.getTxHash()}`); let publicKernelPublicInput = tx.data.toPublicKernelCircuitPublicInputs(); @@ -266,16 +267,13 @@ export class PublicProcessor { lastKernelArtifact = output.lastKernelArtifact; finalKernelOutput = output.finalKernelOutput; revertReason ??= output.revertReason; - phase = PhaseManagerFactory.phaseFromOutput( - publicKernelPublicInput, - phase, - this.db, - this.publicExecutor, - this.publicKernel, - this.globalVariables, - this.historicalHeader, - this.worldStateDB, - ); + + // Update the phase manager config with the current phase + const phaseConfig = { + ...phaseManagerConfig, + phase: phase.phase, + }; + phase = PhaseManagerFactory.phaseFromOutput(publicKernelPublicInput, phase, phaseConfig); } if (!finalKernelOutput) { diff --git a/yarn-project/simulator/src/public/setup_phase_manager.test.ts b/yarn-project/simulator/src/public/setup_phase_manager.test.ts index 58333097a03..d05ce394ba2 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.test.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.test.ts @@ -1,4 +1,4 @@ -import { type TreeInfo, mockTx } from '@aztec/circuit-types'; +import { PublicKernelType, type TreeInfo, mockTx } from '@aztec/circuit-types'; import { GlobalVariables, Header } from '@aztec/circuits.js'; import { type PublicExecutor } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; @@ -6,6 +6,7 @@ import { type MerkleTreeOperations } from '@aztec/world-state'; import { it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; +import { type PhaseConfig } from './abstract_phase_manager.js'; import { type WorldStateDB } from './public_db_sources.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; import { SetupPhaseManager } from './setup_phase_manager.js'; @@ -34,14 +35,16 @@ describe('setup_phase_manager', () => { root = Buffer.alloc(32, 5); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); publicKernel = mock(); - phaseManager = new TestSetupPhaseManager( + const config: PhaseConfig = { db, publicExecutor, publicKernel, - GlobalVariables.empty(), - Header.empty(), + globalVariables: GlobalVariables.empty(), + historicalHeader: Header.empty(), + phase: PublicKernelType.SETUP, worldStateDB, - ); + }; + phaseManager = new TestSetupPhaseManager(config); }); it('does not extract non-revertible calls when none exist', function () { diff --git a/yarn-project/simulator/src/public/setup_phase_manager.ts b/yarn-project/simulator/src/public/setup_phase_manager.ts index 4fd1d02986b..6018938467e 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.ts @@ -1,27 +1,15 @@ import { PublicKernelType, type PublicProvingRequest, type Tx } from '@aztec/circuit-types'; -import { type GlobalVariables, type Header, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; +import { type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; import { type ProtocolArtifact } from 
'@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations } from '@aztec/world-state'; -import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type WorldStateDB } from './public_db_sources.js'; -import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; +import { AbstractPhaseManager, type PhaseConfig, makeAvmProvingRequest } from './abstract_phase_manager.js'; /** * The phase manager responsible for performing the fee preparation phase. */ export class SetupPhaseManager extends AbstractPhaseManager { - constructor( - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - historicalHeader: Header, - protected worldStateDB: WorldStateDB, - phase: PublicKernelType = PublicKernelType.SETUP, - ) { - super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); + constructor(config: PhaseConfig, public override phase: PublicKernelType = PublicKernelType.SETUP) { + super(config); } override async handle( diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index 19de912dd19..8adf8cd5b43 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -1,7 +1,5 @@ import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { - type GlobalVariables, - type Header, type KernelCircuitPublicInputs, MAX_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -10,24 +8,12 @@ import { mergeAccumulatedData, } from '@aztec/circuits.js'; import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations } from '@aztec/world-state'; -import { AbstractPhaseManager } from './abstract_phase_manager.js'; -import { type WorldStateDB } from './public_db_sources.js'; -import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; +import { AbstractPhaseManager, type PhaseConfig } from './abstract_phase_manager.js'; export class TailPhaseManager extends AbstractPhaseManager { - constructor( - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - historicalHeader: Header, - protected worldStateDB: WorldStateDB, - phase: PublicKernelType = PublicKernelType.TAIL, - ) { - super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); + constructor(config: PhaseConfig, public override phase: PublicKernelType = PublicKernelType.TAIL) { + super(config); } override async handle( diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index 85ba32ebd8d..96bb188295b 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -1,35 +1,17 @@ import { PublicKernelType, type PublicProvingRequest, type Tx } from '@aztec/circuit-types'; -import { - type Fr, - type Gas, - type GlobalVariables, - type Header, - type PublicKernelCircuitPublicInputs, -} from '@aztec/circuits.js'; +import { type Fr, type Gas, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; import { type 
ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; -import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; -import { AbstractPhaseManager, makeAvmProvingRequest } from './abstract_phase_manager.js'; -import { type WorldStateDB } from './public_db_sources.js'; -import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; +import { AbstractPhaseManager, type PhaseConfig, makeAvmProvingRequest } from './abstract_phase_manager.js'; /** * The phase manager responsible for performing the fee preparation phase. */ export class TeardownPhaseManager extends AbstractPhaseManager { - constructor( - db: MerkleTreeOperations, - publicExecutor: PublicExecutor, - publicKernel: PublicKernelCircuitSimulator, - globalVariables: GlobalVariables, - historicalHeader: Header, - protected worldStateDB: WorldStateDB, - phase: PublicKernelType = PublicKernelType.TEARDOWN, - ) { - super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); + constructor(config: PhaseConfig, public override phase: PublicKernelType = PublicKernelType.TEARDOWN) { + super(config); } override async handle( From 3927ab94e9fe0ffe25d79045df2dd2a57cf2f40f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:35:37 +0000 Subject: [PATCH 052/123] feat: add re-ex to validator config --- yarn-project/foundation/src/config/env_var.ts | 1 + yarn-project/validator-client/src/config.ts | 8 ++++++++ yarn-project/validator-client/src/validator.ts | 18 +++++++++++------- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 16646956a47..b6e3492c82f 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -112,6 +112,7 @@ export type EnvVar = | 'VALIDATOR_DISABLED' | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' | 'VALIDATOR_ATTESTATIONS_POOLING_INTERVAL_MS' + | 'VALIDATOR_RE_EX' | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' | 'PROVER_NODE_MAX_PENDING_JOBS' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 241bffbfda1..6eac49903a7 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -22,6 +22,9 @@ export interface ValidatorClientConfig { /** Wait for attestations timeout */ attestationWaitTimeoutMs: number; + + /** Re-execute transactions before attesting */ + validatorReEx: boolean; } export const validatorClientConfigMappings: ConfigMappingsType = { @@ -45,6 +48,11 @@ export const validatorClientConfigMappings: ConfigMappingsType this.attestationWaitTimeoutMs) { + if (elapsedTime > this.config.attestationWaitTimeoutMs) { this.log.error(`Timeout waiting for ${numberOfRequiredAttestations} attestations for slot, ${slot}`); throw new AttestationTimeoutError(numberOfRequiredAttestations, slot); } this.log.verbose( - `Collected ${attestations.length} attestations so far, waiting ${this.attestationPoolingIntervalMs}ms for more...`, + `Collected ${attestations.length} attestations so far, waiting ${this.config.attestationPoolingIntervalMs}ms for more...`, ); - await sleep(this.attestationPoolingIntervalMs); + await sleep(this.config.attestationPoolingIntervalMs); } } } From c80af0e5ffa8492d7d5da67f5a3bd643158ab6a1 Mon Sep 17 00:00:00 2001 
From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 20 Sep 2024 15:06:20 +0000 Subject: [PATCH 053/123] chore: rename header in avm to historicalHeader --- .../simulator/src/avm/avm_execution_environment.ts | 4 ++-- yarn-project/simulator/src/avm/avm_simulator.test.ts | 2 +- yarn-project/simulator/src/avm/fixtures/index.ts | 2 +- .../simulator/src/public/abstract_phase_manager.ts | 2 ++ .../simulator/src/public/app_logic_phase_manager.ts | 2 +- yarn-project/simulator/src/public/executor.ts | 9 +++++---- 6 files changed, 12 insertions(+), 9 deletions(-) diff --git a/yarn-project/simulator/src/avm/avm_execution_environment.ts b/yarn-project/simulator/src/avm/avm_execution_environment.ts index 99c55567942..529d833931d 100644 --- a/yarn-project/simulator/src/avm/avm_execution_environment.ts +++ b/yarn-project/simulator/src/avm/avm_execution_environment.ts @@ -24,7 +24,7 @@ export class AvmExecutionEnvironment { public readonly functionSelector: FunctionSelector, // may be temporary (#7224) public readonly contractCallDepth: Fr, public readonly transactionFee: Fr, - public readonly header: Header, + public readonly historicalHeader: Header, public readonly globals: GlobalVariables, public readonly isStaticCall: boolean, public readonly isDelegateCall: boolean, @@ -50,7 +50,7 @@ export class AvmExecutionEnvironment { functionSelector, this.contractCallDepth.add(Fr.ONE), this.transactionFee, - this.header, + this.historicalHeader, this.globals, isStaticCall, isDelegateCall, diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 4742d6b531e..5a5eef82a37 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -803,7 +803,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { /*nestedEnvironment=*/ expect.objectContaining({ sender: environment.address, // sender is top-level call contractCallDepth: new Fr(1), // top call is depth 0, nested is depth 1 - header: environment.header, // just confirming that nested env looks roughly right + header: environment.historicalHeader, // just confirming that nested env looks roughly right globals: environment.globals, // just confirming that nested env looks roughly right isStaticCall: isStaticCall, // TODO(7121): can't check calldata like this since it is modified on environment construction diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index 28092122612..22fd63fd036 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -63,7 +63,7 @@ export function initExecutionEnvironment(overrides?: Partial { diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 52f664df436..a84f69c2e8a 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -21,7 +21,7 @@ import { PublicSideEffectTrace } from './side_effect_trace.js'; export class PublicExecutor { metrics: ExecutorMetrics; - constructor(private readonly worldStateDB: WorldStateDB, private readonly header: Header, client: TelemetryClient) { + constructor(private readonly worldStateDB: WorldStateDB, private readonly historicalHeader: Header, client: TelemetryClient) { this.metrics = new ExecutorMetrics(client, 'PublicExecutor'); } @@ -43,6 +43,7 @@ export class PublicExecutor { globalVariables: 
GlobalVariables, availableGas: Gas, _txContext: TxContext, + // TODO(md): this will be shared? Why do we need to pass it everywhere? pendingSiloedNullifiers: Nullifier[], transactionFee: Fr = Fr.ZERO, startSideEffectCounter: number = 0, @@ -63,7 +64,7 @@ export class PublicExecutor { const avmExecutionEnv = createAvmExecutionEnvironment( executionRequest, - this.header, + this.historicalHeader, globalVariables, transactionFee, ); @@ -120,7 +121,7 @@ export class PublicExecutor { */ function createAvmExecutionEnvironment( executionRequest: PublicExecutionRequest, - header: Header, + historicalHeader: Header, globalVariables: GlobalVariables, transactionFee: Fr, ): AvmExecutionEnvironment { @@ -131,7 +132,7 @@ function createAvmExecutionEnvironment( executionRequest.callContext.functionSelector, /*contractCallDepth=*/ Fr.zero(), transactionFee, - header, + historicalHeader, globalVariables, executionRequest.callContext.isStaticCall, executionRequest.callContext.isDelegateCall, From e5377b3db5862200ac77fc502bf61ce6ddd46c5f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 20 Sep 2024 17:35:49 +0000 Subject: [PATCH 054/123] temp --- .../public_processor.ts | 168 ++++++++ .../public_processor2.test.ts | 375 ++++++++++++++++++ 2 files changed, 543 insertions(+) create mode 100644 yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts create mode 100644 yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts diff --git a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts b/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts new file mode 100644 index 00000000000..198fa138a76 --- /dev/null +++ b/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts @@ -0,0 +1,168 @@ +// A minimal version of the public processor - that does not have the fluff + +import { Tx } from "@aztec/circuit-types"; +import { Gas, GlobalVariables, Header, Nullifier } from "@aztec/circuits.js"; +import { ContractDataSource } from "@aztec/types/contracts"; +import { MerkleTreeOperations } from "@aztec/world-state"; +import { WorldStateDB } from "../public/public_db_sources.js"; +import { PublicExecutor } from "../public/executor.js"; +import { TelemetryClient } from "@aztec/telemetry-client"; +import { PublicExecutionResult } from "../public/execution.js"; +import { ResetSimulatedArtifacts } from "@aztec/noir-protocol-circuits-types"; + + + + +export class PublicProcessor2 { + + public publicExecutor: PublicExecutor; + + + // State + private blockGasLeft: Gas; + private pendingNullifiers: Nullifier[]; + + constructor( + private merkleTrees: MerkleTreeOperations, + private worldStateDB: WorldStateDB, + private globalVariables: GlobalVariables, + // TODO(md): check if thies can be inferred + private historicalHeader: Header, + private telemetryClient: TelemetryClient + ) { + this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); + + // TODO: this will be the total gas limit available for a block + this.blockGasLeft = Gas.empty(); + this.pendingNullifiers = []; + + } + + + public async process(txs: Tx[]) { + txs = txs.map(tx => Tx.clone(tx)); + + // TODO: maybe there is some extra validation to make sure that we do not overrun + // the checks that the kernel should have been doing? 
+ // If not we add them to the vm execution + + for (const tx of txs) { + // We always apply the private part of a transaction + await this.applyPrivateStateUpdates(tx); + + // If the transaction has public calls, we then apply the public state upates + if (tx.hasPublicCalls()) { + await this.executePublicCalls(tx); + } + } + + } + + public async applyPrivateStateUpdates(tx: Tx) { + + } + + // NOTES: + // - gas accounting needs to be done here and + + // - maybe make another transaction context that stores the gas for a trnsactions + + public async executePublicCalls(tx: Tx) { + // Transactions are split up into a number of parts + // 1. Non revertible calls - these run with the public kernel setup + // - This includes fee payment transactions + // 2. Public Calls - These are the the noir code that are enqueued in the tx by the users + // - This runs with the public kernel app logic + // 3. Teardown Call - This is the public teardown call that does fee refunds + // - This runs with the public kernel teardown + const nonRevertibleCalls = tx.getNonRevertiblePublicExecutionRequests(); + const publicCalls = tx.getRevertiblePublicExecutionRequests(); + const teardownCall = tx.getPublicTeardownExecutionRequest()!; + + // TODO: there is some other stuff done in the public processor + // rationalise it and find out where else it can be done + const transactionGas = tx.data.constants.txContext.gasSettings.getLimits(); + + // TODO: Check is trnasaction gas updated where it should? + + // Execute the non revertible calls + for (const call of nonRevertibleCalls) { + const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + + if (!this.temporaryDidCallOrNestedCallRevert(res)) { + console.log("non revertible call succeeded"); + this.addNullifiers(res.nullifiers); + this.worldStateDB.checkpoint(); + } else { + console.log(" call or nested call failed"); + this.worldStateDB.rollbackToCommit(); + + // TODO: When this part fails, we want skip to some cleanup part + // This probably explains the interesting control flow + return; + } + } + + console.log("not returning") + for (const call of publicCalls) { + // Execute the non revertible calls + const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + + // TODO: tidy this up and below + if (!this.temporaryDidCallOrNestedCallRevert(res)) { + this.addNullifiers(res.nullifiers); + } else { + // Similarly, if a teardown call fails, it will revert + // back to the setup state + this.worldStateDB.rollbackToCheckpoint(); + } + } + + if (teardownCall) { + const res = await this.publicExecutor.simulate(teardownCall, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + + if (!this.temporaryDidCallOrNestedCallRevert(res)) { + this.addNullifiers(res.nullifiers); + } else { + // Similarly, if a teardown call fails, it will revert + // back to the setup state + this.worldStateDB.rollbackToCheckpoint(); + } + } + + // TODO(md): think about bringing the journal model into this + this.worldStateDB.commit(); + + // On success lower the block gas left -- add a check!!! 
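The TODO just above ("add a check!!!") asks for a guard before the block gas budget is decremented. A minimal sketch of such a check, assuming Gas exposes numeric daGas and l2Gas fields and that blockGasLeft has been seeded with the block's gas limit rather than the Gas.empty() placeholder currently set in the constructor:

// Sketch: refuse (or defer) the tx instead of underflowing the block gas budget.
if (
  transactionGas.daGas > this.blockGasLeft.daGas ||
  transactionGas.l2Gas > this.blockGasLeft.l2Gas
) {
  throw new Error('Insufficient block gas left for transaction');
}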
+ this.blockGasLeft = this.blockGasLeft.sub(transactionGas); + } + + addNullifiers(nullifiers: Nullifier[]) { + this.pendingNullifiers.push(...nullifiers); + } + + checkpointOrRollback(reverted: boolean) { + if (reverted) { + this.worldStateDB.rollbackToCheckpoint(); + } else { + this.worldStateDB.checkpoint(); + } + } + + // This is a temporary method, in our current kernel model, + // nested calls will trigger an entire execution reversion + // if any nested call reverts + temporaryDidCallOrNestedCallRevert(result: PublicExecutionResult) { + if (result.revertReason) { + return true; + } + if (result.nestedExecutions.length > 0) { + for (const nested of result.nestedExecutions) { + if (this.temporaryDidCallOrNestedCallRevert(nested)) { + return true; + } + } + } + return false; + } +} diff --git a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts b/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts new file mode 100644 index 00000000000..9b0120740a5 --- /dev/null +++ b/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts @@ -0,0 +1,375 @@ +import { + PublicDataWrite, + PublicExecutionRequest, + SimulationError, + type TreeInfo, + mockTx, +} from '@aztec/circuit-types'; +import { + AppendOnlyTreeSnapshot, + AztecAddress, + ContractStorageUpdateRequest, + Fr, + Gas, + GasFees, + GasSettings, + GlobalVariables, + Header, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + PUBLIC_DATA_TREE_HEIGHT, + PartialStateReference, + PublicAccumulatedDataBuilder, + PublicDataTreeLeafPreimage, + PublicDataUpdateRequest, + RevertCode, + StateReference, +} from '@aztec/circuits.js'; +import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; +import { fr, makeSelector } from '@aztec/circuits.js/testing'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; +import { + type PublicExecutionResult, + type PublicExecutor, + WorldStateDB, +} from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { type MerkleTreeOperations } from '@aztec/world-state'; + +import { jest } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; +import { PublicProcessor2 } from '../public-2-electric-boogaloo/public_processor.js'; +import { Spied, SpyInstance } from 'jest-mock'; + +describe('public_processor', () => { + let db: MockProxy; + let worldStateDB: MockProxy; + let publicExecutorSpy: any; + + let root: Buffer; + + let processor: PublicProcessor2; + + beforeEach(() => { + db = mock(); + worldStateDB = mock(); + + root = Buffer.alloc(32, 5); + + db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); + worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); + }); + + describe('with actual circuits', () => { + let publicDataTree: AppendOnlyTree; + + beforeAll(async () => { + publicDataTree = await newTree( + StandardTree, + openTmpStore(), + new Poseidon(), + 'PublicData', + Fr, + PUBLIC_DATA_TREE_HEIGHT, + 1, // Add a default low leaf for the public data hints to be proved against. 
+ ); + }); + + beforeEach(() => { + const snap = new AppendOnlyTreeSnapshot( + Fr.fromBuffer(publicDataTree.getRoot(true)), + Number(publicDataTree.getNumLeaves(true)), + ); + + const header = Header.empty(); + const stateReference = new StateReference( + header.state.l1ToL2MessageTree, + new PartialStateReference(header.state.partial.noteHashTree, header.state.partial.nullifierTree, snap), + ); + // Clone the whole state because somewhere down the line (AbstractPhaseManager) the public data root is modified in the referenced header directly :/ + header.state = StateReference.fromBuffer(stateReference.toBuffer()); + + db.getStateReference.mockResolvedValue(stateReference); + db.getSiblingPath.mockResolvedValue(publicDataTree.getSiblingPath(0n, false)); + db.getPreviousValueIndex.mockResolvedValue({ index: 0n, alreadyPresent: true }); + db.getLeafPreimage.mockResolvedValue(new PublicDataTreeLeafPreimage(new Fr(0), new Fr(0), new Fr(0), 0n)); + + processor = new PublicProcessor2( + db, + worldStateDB, + GlobalVariables.from({ ...GlobalVariables.empty(), gasFees: GasFees.default() }), + header, + new NoopTelemetryClient(), + ); + + publicExecutorSpy = jest.spyOn(processor.publicExecutor, 'simulate') + + publicExecutorSpy.mockImplementation((req: PublicExecutionRequest) => { + const result = PublicExecutionResultBuilder.fromPublicExecutionRequest({ request: req as PublicExecutionRequest }).build(); + return Promise.resolve(result); + }); + }); + + it('rolls back app logic db updates on failed public execution, but persists setup', async function () { + const tx = mockTx(1, { + hasLogs: true, + numberOfNonRevertiblePublicCallRequests: 1, + numberOfRevertiblePublicCallRequests: 1, + hasPublicTeardownCallRequest: true, + }); + + const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); + const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + + const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; + + const nestedContractAddress = AztecAddress.fromBigInt(112233n); + const contractSlotA = fr(0x100); + const contractSlotB = fr(0x150); + const contractSlotC = fr(0x200); + const contractSlotD = fr(0x250); + const contractSlotE = fr(0x300); + const contractSlotF = fr(0x350); + + let simulatorCallCount = 0; + const simulatorResults: PublicExecutionResult[] = [ + // Setup + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: nonRevertibleRequests[0], + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], + }).build(), + + // App Logic + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: revertibleRequests[0], + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: revertibleRequests[0].callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13), + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 14), + new ContractStorageUpdateRequest(contractSlotC, fr(0x200), 15), + ], + }).build(), + PublicExecutionResultBuilder.fromFunctionCall({ + from: revertibleRequests[0].contractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + revertReason: new SimulationError('Simulation Failed', []), + }).build(), + ], + 
}).build(), + + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 16), + new ContractStorageUpdateRequest(contractSlotD, fr(0x251), 17), + new ContractStorageUpdateRequest(contractSlotE, fr(0x301), 18), + new ContractStorageUpdateRequest(contractSlotF, fr(0x351), 19), + ], + }).build(teardownResultSettings), + ], + }).build(teardownResultSettings), + ]; + + jest.spyOn((processor as any).publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + return Promise.resolve(simulatorResults[simulatorCallCount++]); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); + + await processor.process([tx]); + + expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + // We should not need to roll back to the checkpoint, the nested call reverting should not + // mean that the parent call should revert! + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + }); + + it('fails a transaction that reverts in setup', async function () { + const tx = mockTx(1, { + numberOfNonRevertiblePublicCallRequests: 1, + numberOfRevertiblePublicCallRequests: 1, + hasPublicTeardownCallRequest: true, + }); + + const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); + const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + const nestedContractAddress = AztecAddress.fromBigInt(112233n); + const contractSlotA = fr(0x100); + const contractSlotB = fr(0x150); + const contractSlotC = fr(0x200); + + let simulatorCallCount = 0; + const simulatorResults: PublicExecutionResult[] = [ + // Setup + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: nonRevertibleRequests[0], + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: nonRevertibleRequests[0].callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), + ], + }).build(), + PublicExecutionResultBuilder.fromFunctionCall({ + from: nonRevertibleRequests[0].callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + revertReason: new SimulationError('Simulation Failed', []), + }).build(), + ], + }).build(), + + // App Logic + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: revertibleRequests[0], + }).build(), + + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, 
makeSelector(5)), + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], + }).build(), + ], + }).build(), + ]; + + + jest.spyOn((processor as any).publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + return Promise.resolve(simulatorResults[simulatorCallCount++]); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); + + await processor.process([tx]); + + expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(1); + + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(0); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); + }); + + it('includes a transaction that reverts in teardown', async function () { + const tx = mockTx(1, { + hasLogs: true, + numberOfNonRevertiblePublicCallRequests: 1, + numberOfRevertiblePublicCallRequests: 1, + hasPublicTeardownCallRequest: true, + }); + + const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); + const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; + + const nestedContractAddress = AztecAddress.fromBigInt(112233n); + const contractSlotA = fr(0x100); + const contractSlotB = fr(0x150); + const contractSlotC = fr(0x200); + + let simulatorCallCount = 0; + const simulatorResults: PublicExecutionResult[] = [ + // Setup + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: nonRevertibleRequests[0], + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: nonRevertibleRequests[0].callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), + ], + }).build(), + ], + }).build(), + + // App Logic + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: revertibleRequests[0], + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 15), + ], + }).build(), + + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], + }).build(teardownResultSettings), + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 17)], + revertReason: new SimulationError('Simulation Failed', []), + }).build(teardownResultSettings), + ], + }).build(teardownResultSettings), + ]; + + 
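The mock just below replays simulatorResults through a hand-rolled call counter. An equivalent formulation using jest's once-queues is sketched here for comparison; note that the explicit counter throws on unexpected extra calls, which the once-queue does not, so the existing form is arguably the stricter test.

// Sketch: queue the canned results instead of counting calls manually.
const simulateSpy = jest.spyOn(processor.publicExecutor, 'simulate');
for (const result of simulatorResults) {
  simulateSpy.mockResolvedValueOnce(result);
}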
jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + return Promise.resolve(simulatorResults[simulatorCallCount++]); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); + + await processor.process([tx]); + + expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + + + // we keep the non-revertible logs + // TODO: keep track of this + // expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3); + // expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1); + + // expect(processed[0].data.revertCode).toEqual(RevertCode.TEARDOWN_REVERTED); + + // expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); + }); + }); +}); From 4c6a2d34823ab6954d87065b35eede51bd2b67ec Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:34:08 +0000 Subject: [PATCH 055/123] feat: validator's sync then re-ex --- .../aztec-node/src/aztec-node/server.ts | 2 +- .../end-to-end/src/e2e_p2p_network.test.ts | 43 +++++ .../end-to-end/src/e2e_p2p_reex.test.ts | 153 ++++++++++++++++++ .../end-to-end/src/fixtures/setup_p2p_test.ts | 3 + yarn-project/simulator/src/public/index.ts | 1 + .../light_public_processor.test.ts} | 11 +- .../light_public_processor.ts} | 81 +++++++++- yarn-project/validator-client/package.json | 1 + .../src/errors/validator.error.ts | 18 +++ yarn-project/validator-client/src/factory.ts | 23 ++- .../validator-client/src/validator.test.ts | 30 ++++ .../validator-client/src/validator.ts | 90 ++++++++++- yarn-project/yarn.lock | 1 + 13 files changed, 438 insertions(+), 19 deletions(-) create mode 100644 yarn-project/end-to-end/src/e2e_p2p_reex.test.ts rename yarn-project/simulator/src/{public-2-electric-boogaloo/public_processor2.test.ts => public/light_public_processor.test.ts} (98%) rename yarn-project/simulator/src/{public-2-electric-boogaloo/public_processor.ts => public/light_public_processor.ts} (66%) diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index cf655779674..1fc9f05063b 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -164,7 +164,7 @@ export class AztecNodeService implements AztecNode { const simulationProvider = await createSimulationProvider(config, log); - const validatorClient = createValidatorClient(config, p2pClient); + const validatorClient = await createValidatorClient(config, p2pClient, worldStateSynchronizer, archiver, telemetry); // now create the sequencer const sequencer = config.disableSequencer diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 6caf8f6bec4..982fedf479e 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -230,6 +230,49 @@ describe('e2e_p2p_network', () => { await stopNodes(bootstrapNode, nodes); }); + it.only('should rollup txs from all peers with re-execution', async () => { + // create the bootstrap node for the network + if (!bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + 
} + + config.validatorReEx = true; + + // create our network of nodes and submit txs into each of them + // the number of txs per node and the number of txs per rollup + // should be set so that the only way for rollups to be built + // is if the txs are successfully gossiped around the nodes. + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = await createNodes( + config, + PEER_ID_PRIVATE_KEYS, + bootstrapNodeEnr, + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); + + // wait a bit for peers to discover each other + await sleep(4000); + + for (const node of nodes) { + const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); + contexts.push(context); + } + + // now ensure that all txs were successfully mined + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ), + ); + + // shutdown all nodes. + await stopNodes(bootstrapNode, nodes); + }); + it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; const nodes: AztecNodeService[] = await createNodes( diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts new file mode 100644 index 00000000000..e9a15f912d9 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts @@ -0,0 +1,153 @@ +// /** +// * A test where the validator nodes re-execute transactions from a block before attesting to it. +// */ + +// import { privateKeyToAccount } from "viem/accounts"; +// import { getPrivateKeyFromIndex, setup } from "./fixtures/utils.js"; +// import { EthAddress, ETHEREUM_SLOT_DURATION } from "@aztec/circuits.js"; +// import { AztecNodeConfig, AztecNodeService } from "@aztec/aztec-node"; +// import { BootstrapNode } from "@aztec/p2p"; +// import { +// CompleteAddress, +// type DebugLogger, +// type DeployL1Contracts, +// EthCheatCodes, +// Fr, +// GrumpkinScalar, +// type SentTx, +// TxStatus, +// sleep, +// } from '@aztec/aztec.js'; +// import { createBootstrapNode, createNodes, NodeContext } from "./fixtures/setup_p2p_test.js"; +// import { getContract } from "viem"; +// import { RollupAbi } from "@aztec/l1-artifacts"; + +// const NUM_NODES = 2; +// const NUM_TX_PER_BLOCK = 4; +// const NUM_TX_PER_NODE = 2; +// const BOOT_NODE_UDP_PORT = 40400; + +// const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); + +// describe('e2e_p2p_reex', () => { +// let config: AztecNodeConfig; +// let logger: DebugLogger; +// let teardown: () => Promise; +// let bootstrapNode: BootstrapNode; +// let bootstrapNodeEnr: string; +// let deployL1ContractsValues: DeployL1Contracts; + + + +// beforeEach(async () => { + +// const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); +// const initialValidators = [EthAddress.fromString(account.address)]; + +// ({ +// teardown, +// config, +// logger, +// deployL1ContractsValues +// } = await setup(0, { +// initialValidators, +// l1BlockTime: ETHEREUM_SLOT_DURATION, +// salt: 420, +// })); + + +// bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); +// bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + +// config.minTxsPerBlock = NUM_TX_PER_BLOCK; +// config.maxTxsPerBlock = NUM_TX_PER_BLOCK; + +// const rollup = getContract({ +// address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), +// abi: RollupAbi, +// client: 
deployL1ContractsValues.walletClient, +// }); + +// for (let i = 0; i < NUM_NODES; i++) { +// const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); +// await rollup.write.addValidator([account.address]); +// logger.debug(`Adding ${account.address} as validator`); +// } + +// // Remove the initial sequencer from the set! This was the sequencer we used for perform the setup. +// logger.debug(`Removing ${account.address} as validator`); +// const txHash = await rollup.write.removeValidator([account.address]); + +// await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); + +// // TODO(md): fix this case +// //@note Now we jump ahead to the next epoch such that the validator committee is picked +// // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! +// // Which means that the validator set will still be empty! So anyone can propose. +// const slotsInEpoch = await rollup.read.EPOCH_DURATION(); +// const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); +// const cheatCodes = new EthCheatCodes(config.l1RpcUrl); + +// try { +// await cheatCodes.warp(Number(timestamp)); +// } catch (err) { +// logger.debug('Warp failed, time already satisfied'); +// } + +// // Send and await a tx to make sure we mine a block for the warp to correctly progress. +// await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ +// hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), +// }); +// }); + + +// it('should rollup txs from all peers', async () => { +// // create the bootstrap node for the network +// if (!bootstrapNodeEnr) { +// throw new Error('Bootstrap node ENR is not available'); +// } + +// // Trigger re-execution +// config.validatorReEx = true; + +// // create our network of nodes and submit txs into each of them +// // the number of txs per node and the number of txs per rollup +// // should be set so that the only way for rollups to be built +// // is if the txs are successfully gossiped around the nodes. +// const contexts: NodeContext[] = []; +// const nodes: AztecNodeService[] = await createNodes( +// config, +// PEER_ID_PRIVATE_KEYS, +// bootstrapNodeEnr, +// NUM_NODES, +// BOOT_NODE_UDP_PORT, +// ); + +// // wait a bit for peers to discover each other +// await sleep(4000); + +// for (const node of nodes) { +// const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); +// contexts.push(context); +// } + +// // now ensure that all txs were successfully mined +// await Promise.all( +// contexts.flatMap((context, i) => +// context.txs.map(async (tx, j) => { +// logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); +// return tx.wait(); +// }), +// ), +// ); + +// // shutdown all nodes. 
+// await stopNodes(bootstrapNode, nodes); +// }); + + + + + + +// }); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 560677f6bee..99cdf0aa007 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -10,8 +10,10 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { generatePrivateKey } from 'viem/accounts'; + import { getPrivateKeyFromIndex } from './utils.js'; + export interface NodeContext { node: AztecNodeService; pxeService: PXEService; @@ -28,6 +30,7 @@ export function generatePeerIdPrivateKeys(numberOfPeers: number): string[] { return peerIdPrivateKeys; } + export async function createNodes( config: AztecNodeConfig, peerIdPrivateKeys: string[], diff --git a/yarn-project/simulator/src/public/index.ts b/yarn-project/simulator/src/public/index.ts index 278c86d25bd..cfed1b576ea 100644 --- a/yarn-project/simulator/src/public/index.ts +++ b/yarn-project/simulator/src/public/index.ts @@ -8,4 +8,5 @@ export * from './public_db_sources.js'; export * from './public_kernel.js'; export * from './public_kernel_circuit_simulator.js'; export { PublicProcessor, PublicProcessorFactory } from './public_processor.js'; +export { LightPublicProcessor, LightPublicProcessorFactory } from './light_public_processor.js'; export { PublicSideEffectTrace } from './side_effect_trace.js'; diff --git a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts similarity index 98% rename from yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts rename to yarn-project/simulator/src/public/light_public_processor.test.ts index 9b0120740a5..50eb1cdb6f8 100644 --- a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor2.test.ts +++ b/yarn-project/simulator/src/public/light_public_processor.test.ts @@ -3,6 +3,7 @@ import { PublicExecutionRequest, SimulationError, type TreeInfo, + TxValidator, mockTx, } from '@aztec/circuit-types'; import { @@ -30,6 +31,7 @@ import { fr, makeSelector } from '@aztec/circuits.js/testing'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; import { + LightPublicProcessor, type PublicExecutionResult, type PublicExecutor, WorldStateDB, @@ -41,17 +43,17 @@ import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; -import { PublicProcessor2 } from '../public-2-electric-boogaloo/public_processor.js'; import { Spied, SpyInstance } from 'jest-mock'; describe('public_processor', () => { let db: MockProxy; let worldStateDB: MockProxy; + let txValidator: MockProxy>; let publicExecutorSpy: any; let root: Buffer; - let processor: PublicProcessor2; + let processor: LightPublicProcessor; beforeEach(() => { db = mock(); @@ -97,11 +99,14 @@ describe('public_processor', () => { db.getPreviousValueIndex.mockResolvedValue({ index: 0n, alreadyPresent: true }); db.getLeafPreimage.mockResolvedValue(new PublicDataTreeLeafPreimage(new Fr(0), new Fr(0), new Fr(0), 0n)); - processor = new PublicProcessor2( + txValidator = mock>(); + + processor = new LightPublicProcessor( db, worldStateDB, GlobalVariables.from({ ...GlobalVariables.empty(), gasFees: GasFees.default() }), header, + 
txValidator, new NoopTelemetryClient(), ); diff --git a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts similarity index 66% rename from yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts rename to yarn-project/simulator/src/public/light_public_processor.ts index 198fa138a76..bb940150f0d 100644 --- a/yarn-project/simulator/src/public-2-electric-boogaloo/public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,19 +1,55 @@ // A minimal version of the public processor - that does not have the fluff -import { Tx } from "@aztec/circuit-types"; +import { MerkleTreeId, Tx, TxValidator, WorldStateSynchronizer } from "@aztec/circuit-types"; import { Gas, GlobalVariables, Header, Nullifier } from "@aztec/circuits.js"; -import { ContractDataSource } from "@aztec/types/contracts"; import { MerkleTreeOperations } from "@aztec/world-state"; -import { WorldStateDB } from "../public/public_db_sources.js"; -import { PublicExecutor } from "../public/executor.js"; +import { WorldStateDB } from "./public_db_sources.js"; +import { PublicExecutor } from "./executor.js"; import { TelemetryClient } from "@aztec/telemetry-client"; -import { PublicExecutionResult } from "../public/execution.js"; -import { ResetSimulatedArtifacts } from "@aztec/noir-protocol-circuits-types"; +import { PublicExecutionResult } from "./execution.js"; +import { ContractDataSource } from "@aztec/types/contracts"; + +export class LightPublicProcessorFactory { + constructor( + private worldStateSynchronizer: WorldStateSynchronizer, + private contractDataSource: ContractDataSource, + private telemetryClient: TelemetryClient + ) {} + + // TODO: using the same interface as the orther public processor factory + // But can probably do alot better here in debt cleanup + public async createWithSyncedState( + targetBlockNumber: number, + maybeHistoricalHeader: Header | undefined, + globalVariables: GlobalVariables, + txValidator: TxValidator + ) { + // TODO: should this be here? + // TODO: is this safe? + // Make sure the world state synchronizer is synced + await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); + const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); + // TODO(md): we need to first make sure that we are synced + const historicalHeader = maybeHistoricalHeader ?? 
merkleTrees.getInitialHeader(); + const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); + return new LightPublicProcessor(merkleTrees, worldStateDB, globalVariables, historicalHeader, txValidator, this.telemetryClient); + } +} -export class PublicProcessor2 { + +export class InvalidTransactionsFound extends Error { + constructor() { + super("Double spend tx found"); + } +} + +/** + * A variant of the public processor that does not run the kernel circuits + */ +export class LightPublicProcessor { public publicExecutor: PublicExecutor; @@ -28,6 +64,7 @@ export class PublicProcessor2 { private globalVariables: GlobalVariables, // TODO(md): check if thies can be inferred private historicalHeader: Header, + private txValidator: TxValidator, private telemetryClient: TelemetryClient ) { this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); @@ -38,6 +75,15 @@ export class PublicProcessor2 { } + // NOTE: does not have the archive + public async getTreeSnapshots() { + return Promise.all([ + this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), + ]); + } public async process(txs: Tx[]) { txs = txs.map(tx => Tx.clone(tx)); @@ -45,8 +91,16 @@ export class PublicProcessor2 { // TODO: maybe there is some extra validation to make sure that we do not overrun // the checks that the kernel should have been doing? // If not we add them to the vm execution + const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + + // If any of the transactions are invalid, we throw an error + if (invalidTxs.length > 0) { + throw new InvalidTransactionsFound(); + } for (const tx of txs) { + // We have already validated the txs, so we can just apply the state updates + // We always apply the private part of a transaction await this.applyPrivateStateUpdates(tx); @@ -59,7 +113,20 @@ export class PublicProcessor2 { } public async applyPrivateStateUpdates(tx: Tx) { + // TODO(md): do the last block number check??? + + // TODO: read this from forPublic or forRollup??? + if (tx.data.forRollup) { + // TODO: do we insert all or just non empty?? 
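The insertions that follow call appendLeaves without awaiting the returned promises, so a failure there would surface as an unhandled rejection and the public simulation could begin before the private state has landed. A sketch of the awaited form (same calls, just sequenced; whether empty leaves should be filtered out first is the open question in the TODO above):

// Sketch: await the insertions so private state is applied before any public call runs.
if (nullifiers.length > 0) {
  await this.merkleTrees.appendLeaves(
    MerkleTreeId.NULLIFIER_TREE,
    nullifiers.map(n => n.toBuffer()),
  );
}
if (noteHashes.length > 0) {
  await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
}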
+ const {nullifiers, noteHashes} = tx.data.forRollup.end; + if (nullifiers) { + this.merkleTrees.appendLeaves(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer())); + } + if (noteHashes) { + this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); + } + } } // NOTES: diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index 23ca3b59106..ced03c4c44f 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -61,6 +61,7 @@ "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/p2p": "workspace:^", + "@aztec/simulator": "workspace:^", "@aztec/types": "workspace:^", "koa": "^2.14.2", "koa-router": "^12.0.0", diff --git a/yarn-project/validator-client/src/errors/validator.error.ts b/yarn-project/validator-client/src/errors/validator.error.ts index 73c3eaf2729..0c1bac26ead 100644 --- a/yarn-project/validator-client/src/errors/validator.error.ts +++ b/yarn-project/validator-client/src/errors/validator.error.ts @@ -23,3 +23,21 @@ export class TransactionsNotAvailableError extends ValidatorError { super(`Transactions not available: ${txHashes.join(', ')}`); } } + +export class FailedToReExecuteTransactionsError extends ValidatorError { + constructor(txHashes: TxHash[]) { + super(`Failed to re-execute transactions: ${txHashes.join(', ')}`); + } +} + +export class ReExStateMismatchError extends ValidatorError { + constructor() { + super('Re-execution state mismatch'); + } +} + +export class PublicProcessorNotProvidedError extends ValidatorError { + constructor() { + super('Public processor not provided'); + } +} diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 1438448d850..4fad4d4a0f0 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -4,8 +4,16 @@ import { generatePrivateKey } from 'viem/accounts'; import { type ValidatorClientConfig } from './config.js'; import { ValidatorClient } from './validator.js'; +import { LightPublicProcessorFactory } from '@aztec/simulator'; +import { ArchiveSource } from '@aztec/archiver'; +import { createWorldStateSynchronizer } from '../../world-state/dest/synchronizer/factory.js'; +import { TelemetryClient } from '../../telemetry-client/src/telemetry.js'; +import { WorldStateConfig } from '../../world-state/dest/synchronizer/config.js'; +import { DataStoreConfig } from '@aztec/kv-store/utils'; +import { WorldStateSynchronizer } from '@aztec/circuit-types'; -export function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P) { +// TODO: TODO: make the archiver optional??? +export async function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, worldStateSynchronizer: WorldStateSynchronizer, archiver: ArchiveSource, telemetry: TelemetryClient) { if (config.disableValidator) { return undefined; } @@ -13,5 +21,18 @@ export function createValidatorClient(config: ValidatorClientConfig, p2pClient: if (config.validatorPrivateKey === undefined || config.validatorPrivateKey === '') { config.validatorPrivateKey = generatePrivateKey(); } + + if (config.validatorReEx) { + // It need to be able to create a public processor from somewhere? 
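The comment above asks what the validator actually needs this factory for. In this same commit the consumer is ValidatorClient.reExecuteTransactions: it syncs world state to the proposal's parent block, forks it, replays the proposal's transactions, and compares the resulting tree roots against the proposal header. A condensed sketch of that call chain, with proposal, txs and txValidator standing in for the values the validator derives from the block proposal:

// Sketch of how the validator consumes the factory (condensed from the
// ValidatorClient.reExecuteTransactions changes later in this commit).
const parentBlockNumber = proposal.payload.header.globalVariables.blockNumber.toNumber() - 1;
const lightProcessor = await publicProcessorFactory.createWithSyncedState(
  parentBlockNumber,
  undefined, // historical header is resolved from the forked trees
  proposal.payload.header.globalVariables,
  txValidator,
);
await lightProcessor.process(txs);
const [nullifierTree, noteHashTree, publicDataTree, l1ToL2MessageTree] =
  await lightProcessor.getTreeSnapshots();
// The roots of these snapshots are then checked against proposal.payload.header.state.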
+ // What on earth does it need to do this -> check the sequencer code for this + + const publicProcessorFactory = new LightPublicProcessorFactory( + worldStateSynchronizer, + archiver, + telemetry + ); + return ValidatorClient.new(config, p2pClient, publicProcessorFactory); + } + return ValidatorClient.new(config, p2pClient); } diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index fc870184fec..10cf1fc2dc5 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -6,6 +6,7 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type P2P } from '@aztec/p2p'; +import { LightPublicProcessor, LightPublicProcessorFactory } from '@aztec/simulator'; import { describe, expect, it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -16,6 +17,7 @@ import { type ValidatorClientConfig } from './config.js'; import { AttestationTimeoutError, InvalidValidatorPrivateKeyError, + PublicProcessorNotProvidedError, TransactionsNotAvailableError, } from './errors/validator.error.js'; import { ValidatorClient } from './validator.js'; @@ -25,11 +27,18 @@ describe('ValidationService', () => { let validatorClient: ValidatorClient; let p2pClient: MockProxy; let validatorAccount: PrivateKeyAccount; + let lightPublicProcessorFactory: MockProxy; + let lightPublicProcessor: MockProxy; beforeEach(() => { p2pClient = mock(); p2pClient.getAttestationsForSlot.mockImplementation(() => Promise.resolve([])); + lightPublicProcessorFactory = mock(); + lightPublicProcessor = mock(); + + lightPublicProcessorFactory.createWithSyncedState.mockImplementation(() => Promise.resolve(lightPublicProcessor)); + const validatorPrivateKey = generatePrivateKey(); validatorAccount = privateKeyToAccount(validatorPrivateKey); @@ -38,6 +47,7 @@ describe('ValidationService', () => { attestationPoolingIntervalMs: 1000, attestationWaitTimeoutMs: 1000, disableValidator: false, + validatorReEx: false, }; validatorClient = ValidatorClient.new(config, p2pClient); }); @@ -47,6 +57,11 @@ describe('ValidationService', () => { expect(() => ValidatorClient.new(config, p2pClient)).toThrow(InvalidValidatorPrivateKeyError); }); + it("Should throw an error if re-execution is enabled but no public processor is provided", () => { + config.validatorReEx = true; + expect(() => ValidatorClient.new(config, p2pClient)).toThrow(PublicProcessorNotProvidedError); + }); + it('Should create a valid block proposal', async () => { const header = makeHeader(); const archive = Fr.random(); @@ -78,4 +93,19 @@ describe('ValidationService', () => { TransactionsNotAvailableError, ); }); + + it("Should not return an attestation if re-execution fails", async () => { + const proposal = makeBlockProposal(); + + // mock the p2pClient.getTxStatus to return undefined for all transactions + p2pClient.getTxStatus.mockImplementation(() => undefined); + + const val = ValidatorClient.new(config, p2pClient, lightPublicProcessorFactory); + lightPublicProcessor.process.mockImplementation(() => { + throw new Error(); + }); + + const attestation = await val.attestToProposal(proposal); + expect(attestation).toBeUndefined(); + }) }); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 6790c0fd563..2c92053eb91 100644 --- 
a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,20 +1,24 @@ -import { type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; +import { MerkleTreeId, Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; -import { type P2P } from '@aztec/p2p'; +import { DoubleSpendTxValidator, type P2P } from '@aztec/p2p'; import { type ValidatorClientConfig } from './config.js'; import { ValidationService } from './duties/validation_service.js'; import { AttestationTimeoutError, + FailedToReExecuteTransactionsError, InvalidValidatorPrivateKeyError, + PublicProcessorNotProvidedError, TransactionsNotAvailableError, + ReExStateMismatchError } from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; +import { LightPublicProcessor, LightPublicProcessorFactory } from '@aztec/simulator'; export interface Validator { start(): Promise; @@ -28,6 +32,7 @@ export interface Validator { collectAttestations(proposal: BlockProposal, numberOfRequiredAttestations: number): Promise; } + /** Validator Client */ export class ValidatorClient implements Validator { @@ -37,19 +42,23 @@ export class ValidatorClient implements Validator { keyStore: ValidatorKeyStore, private p2pClient: P2P, private config: ValidatorClientConfig, + private lightPublicProcessorFactory?: LightPublicProcessorFactory | undefined, private log = createDebugLogger('aztec:validator'), ) { - //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 - this.validationService = new ValidationService(keyStore); this.log.verbose('Initialized validator'); } - static new(config: ValidatorClientConfig, p2pClient: P2P) { + static new(config: ValidatorClientConfig, p2pClient: P2P, publicProcessorFactory?: LightPublicProcessorFactory | undefined) { if (!config.validatorPrivateKey) { throw new InvalidValidatorPrivateKeyError(); } + //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 + if (config.validatorReEx && publicProcessorFactory === undefined){ + throw new PublicProcessorNotProvidedError(); + } + const privateKey = validatePrivateKey(config.validatorPrivateKey); const localKeyStore = new LocalKeyStore(privateKey); @@ -57,6 +66,7 @@ export class ValidatorClient implements Validator { localKeyStore, p2pClient, config, + publicProcessorFactory, ); validator.registerBlockProposalHandler(); return validator; @@ -83,8 +93,8 @@ export class ValidatorClient implements Validator { await this.ensureTransactionsAreAvailable(proposal); if (this.config.validatorReEx) { - this.log.debug(`Re-executing transactions in the proposal before attesting`); - // await this.reExecuteTransactions(proposal); + this.log.verbose(`Re-executing transactions in the proposal before attesting`); + await this.reExecuteTransactions(proposal); } } catch (error: any) { @@ -101,6 +111,72 @@ export class ValidatorClient implements Validator { return this.validationService.attestToProposal(proposal); } + async reExecuteTransactions(proposal: BlockProposal) { 
+ // TODO(md): currently we are not running the rollups, so cannot calcualte the archive + // - use pallas new work to do this + const {header, txHashes} = proposal.payload; + + const txs = await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx))); + const filteredTransactions = txs.filter(tx => tx !== undefined); + + // TODO: messy af - think about this more + if (filteredTransactions.length !== txHashes.length) { + this.log.error(`Failed to get transactions from the network: ${txHashes.join(', ')}`); + throw new TransactionsNotAvailableError(txHashes); + } + + if (!this.lightPublicProcessorFactory) { + throw new PublicProcessorNotProvidedError(); + } + + // TODO(md): make the transaction validator here + // TODO(md): for now breaking the rules and makeing the world state sync public on the factory + + const txValidator = new DoubleSpendTxValidator((this.lightPublicProcessorFactory as any).worldStateSynchronizer); + // We force the state to be synced here + const targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; + + this.log.verbose(`Re-ex: Syncing state to block number ${targetBlockNumber}`); + const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState(targetBlockNumber, undefined, header.globalVariables, txValidator); + + this.log.verbose(`Re-ex: Re-executing transactions`); + const txResults = await lightProcessor.process(filteredTransactions as Tx[]); + this.log.verbose(`Re-ex: Re-execution complete`); + + // TODO: update this to check the archive matches + if (txResults === undefined) { + this.log.error(`Failed to re-execute transactions: ${txs.join(', ')}`); + throw new FailedToReExecuteTransactionsError(txHashes); + } + + const [ + newNullifierTree, + newNoteHashTree, + newPublicDataTree, + newL1ToL2MessageTree, + ] = await lightProcessor.getTreeSnapshots(); + + // Check the header has this information + // TODO: replace with archive check once we have it + if (header.state.l1ToL2MessageTree.root.toBuffer() !== (await newL1ToL2MessageTree).root) { + this.log.error(`Re-ex: l1ToL2MessageTree does not match`); + throw new ReExStateMismatchError(); + } + if (header.state.partial.nullifierTree.root.toBuffer() !== (await newNullifierTree).root) { + this.log.error(`Re-ex: nullifierTree does not match`); + throw new ReExStateMismatchError(); + } + if (header.state.partial.noteHashTree.root.toBuffer() !== (await newNoteHashTree).root) { + this.log.error(`Re-ex: noteHashTree does not match`); + throw new ReExStateMismatchError(); + } + if (header.state.partial.publicDataTree.root.toBuffer() !== (await newPublicDataTree).root) { + this.log.error(`Re-ex: publicDataTree does not match`); + throw new ReExStateMismatchError(); + } + + } + /** * Ensure that all of the transactions in the proposal are available in the tx pool before attesting * diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 677a033c70b..c1e6d5757ed 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1223,6 +1223,7 @@ __metadata: "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/p2p": "workspace:^" + "@aztec/simulator": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 "@types/jest": ^29.5.0 From a7124ae14453a19d4f1c4b603a9b6e48460b2255 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 25 Sep 2024 17:03:39 +0000 Subject: [PATCH 056/123] add: reex test --- .../src/structs/public_data_update_request.ts | 11 + .../end-to-end/src/e2e_p2p_network.test.ts 
| 42 -- .../end-to-end/src/e2e_p2p_reex.test.ts | 369 ++++++++++-------- .../orchestrator/block-building-helpers.ts | 5 + .../src/block_builder/light.ts | 2 +- .../src/public/light_public_processor.ts | 171 ++++++-- .../validator-client/src/validator.ts | 35 +- 7 files changed, 397 insertions(+), 238 deletions(-) diff --git a/yarn-project/circuits.js/src/structs/public_data_update_request.ts b/yarn-project/circuits.js/src/structs/public_data_update_request.ts index d048a1100a4..79bc9546950 100644 --- a/yarn-project/circuits.js/src/structs/public_data_update_request.ts +++ b/yarn-project/circuits.js/src/structs/public_data_update_request.ts @@ -1,7 +1,11 @@ +import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { inspect } from 'util'; +import { GeneratorIndex } from '../constants.gen.js'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { ContractStorageUpdateRequest } from './contract_storage_update_request.js'; /** * Write operations on the public data tree including the previous value. @@ -74,6 +78,13 @@ export class PublicDataUpdateRequest { return new PublicDataUpdateRequest(Fr.fromBuffer(reader), Fr.fromBuffer(reader), reader.readNumber()); } + static fromContractStorageUpdateRequest(contractAddress: AztecAddress, updateRequest: ContractStorageUpdateRequest) { + const leafSlot = poseidon2HashWithSeparator([contractAddress, updateRequest.storageSlot], GeneratorIndex.PUBLIC_LEAF_INDEX); + + return new PublicDataUpdateRequest(leafSlot, updateRequest.newValue, updateRequest.counter); + + } + static empty() { return new PublicDataUpdateRequest(Fr.ZERO, Fr.ZERO, 0); } diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 982fedf479e..544b319acae 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -230,48 +230,6 @@ describe('e2e_p2p_network', () => { await stopNodes(bootstrapNode, nodes); }); - it.only('should rollup txs from all peers with re-execution', async () => { - // create the bootstrap node for the network - if (!bootstrapNodeEnr) { - throw new Error('Bootstrap node ENR is not available'); - } - - config.validatorReEx = true; - - // create our network of nodes and submit txs into each of them - // the number of txs per node and the number of txs per rollup - // should be set so that the only way for rollups to be built - // is if the txs are successfully gossiped around the nodes. - const contexts: NodeContext[] = []; - const nodes: AztecNodeService[] = await createNodes( - config, - PEER_ID_PRIVATE_KEYS, - bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); - - // wait a bit for peers to discover each other - await sleep(4000); - - for (const node of nodes) { - const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); - contexts.push(context); - } - - // now ensure that all txs were successfully mined - await Promise.all( - contexts.flatMap((context, i) => - context.txs.map(async (tx, j) => { - logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); - }), - ), - ); - - // shutdown all nodes. 
- await stopNodes(bootstrapNode, nodes); - }); it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts index e9a15f912d9..19515210192 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts @@ -1,153 +1,216 @@ -// /** -// * A test where the validator nodes re-execute transactions from a block before attesting to it. -// */ - -// import { privateKeyToAccount } from "viem/accounts"; -// import { getPrivateKeyFromIndex, setup } from "./fixtures/utils.js"; -// import { EthAddress, ETHEREUM_SLOT_DURATION } from "@aztec/circuits.js"; -// import { AztecNodeConfig, AztecNodeService } from "@aztec/aztec-node"; -// import { BootstrapNode } from "@aztec/p2p"; -// import { -// CompleteAddress, -// type DebugLogger, -// type DeployL1Contracts, -// EthCheatCodes, -// Fr, -// GrumpkinScalar, -// type SentTx, -// TxStatus, -// sleep, -// } from '@aztec/aztec.js'; -// import { createBootstrapNode, createNodes, NodeContext } from "./fixtures/setup_p2p_test.js"; -// import { getContract } from "viem"; -// import { RollupAbi } from "@aztec/l1-artifacts"; - -// const NUM_NODES = 2; -// const NUM_TX_PER_BLOCK = 4; -// const NUM_TX_PER_NODE = 2; -// const BOOT_NODE_UDP_PORT = 40400; - -// const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); - -// describe('e2e_p2p_reex', () => { -// let config: AztecNodeConfig; -// let logger: DebugLogger; -// let teardown: () => Promise; -// let bootstrapNode: BootstrapNode; -// let bootstrapNodeEnr: string; -// let deployL1ContractsValues: DeployL1Contracts; - - - -// beforeEach(async () => { - -// const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); -// const initialValidators = [EthAddress.fromString(account.address)]; - -// ({ -// teardown, -// config, -// logger, -// deployL1ContractsValues -// } = await setup(0, { -// initialValidators, -// l1BlockTime: ETHEREUM_SLOT_DURATION, -// salt: 420, -// })); - - -// bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); -// bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - -// config.minTxsPerBlock = NUM_TX_PER_BLOCK; -// config.maxTxsPerBlock = NUM_TX_PER_BLOCK; - -// const rollup = getContract({ -// address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), -// abi: RollupAbi, -// client: deployL1ContractsValues.walletClient, -// }); - -// for (let i = 0; i < NUM_NODES; i++) { -// const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); -// await rollup.write.addValidator([account.address]); -// logger.debug(`Adding ${account.address} as validator`); -// } - -// // Remove the initial sequencer from the set! This was the sequencer we used for perform the setup. -// logger.debug(`Removing ${account.address} as validator`); -// const txHash = await rollup.write.removeValidator([account.address]); - -// await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); - -// // TODO(md): fix this case -// //@note Now we jump ahead to the next epoch such that the validator committee is picked -// // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! -// // Which means that the validator set will still be empty! So anyone can propose. 
-// const slotsInEpoch = await rollup.read.EPOCH_DURATION(); -// const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); -// const cheatCodes = new EthCheatCodes(config.l1RpcUrl); - -// try { -// await cheatCodes.warp(Number(timestamp)); -// } catch (err) { -// logger.debug('Warp failed, time already satisfied'); -// } - -// // Send and await a tx to make sure we mine a block for the warp to correctly progress. -// await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ -// hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), -// }); -// }); - - -// it('should rollup txs from all peers', async () => { -// // create the bootstrap node for the network -// if (!bootstrapNodeEnr) { -// throw new Error('Bootstrap node ENR is not available'); -// } - -// // Trigger re-execution -// config.validatorReEx = true; - -// // create our network of nodes and submit txs into each of them -// // the number of txs per node and the number of txs per rollup -// // should be set so that the only way for rollups to be built -// // is if the txs are successfully gossiped around the nodes. -// const contexts: NodeContext[] = []; -// const nodes: AztecNodeService[] = await createNodes( -// config, -// PEER_ID_PRIVATE_KEYS, -// bootstrapNodeEnr, -// NUM_NODES, -// BOOT_NODE_UDP_PORT, -// ); - -// // wait a bit for peers to discover each other -// await sleep(4000); - -// for (const node of nodes) { -// const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); -// contexts.push(context); -// } - -// // now ensure that all txs were successfully mined -// await Promise.all( -// contexts.flatMap((context, i) => -// context.txs.map(async (tx, j) => { -// logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); -// return tx.wait(); -// }), -// ), -// ); - -// // shutdown all nodes. 
-// await stopNodes(bootstrapNode, nodes); -// }); - - - - - - -// }); +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; +import { + AccountWalletWithSecretKey, + CompleteAddress, + type DebugLogger, + type DeployL1Contracts, + EthCheatCodes, + Fr, + GrumpkinScalar, + type SentTx, + TxStatus, + sleep, +} from '@aztec/aztec.js'; +import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import { type BootstrapNode } from '@aztec/p2p'; +import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; + +import { jest } from '@jest/globals'; +import fs from 'fs'; +import { getContract } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; +import { SpamContract, TokenContract } from '@aztec/noir-contracts.js'; + +import { + createBootstrapNode, + createNodes, + generatePeerIdPrivateKeys, +} from './fixtures/setup_p2p_test.js'; +import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; +import { createAccounts } from '@aztec/accounts/testing'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 2; +const NUM_TXS_PER_BLOCK = 2; +const NUM_TXS_PER_NODE = 2; +const BOOT_NODE_UDP_PORT = 40400; + +const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); + +describe('e2e_p2p_network', () => { + let config: AztecNodeConfig; + let logger: DebugLogger; + let teardown: () => Promise; + let bootstrapNode: BootstrapNode; + let bootstrapNodeEnr: string; + let deployL1ContractsValues: DeployL1Contracts; + + let token: TokenContract; + let spam: SpamContract; + + beforeEach(async () => { + // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` + const useLocalHost = false; + if (useLocalHost) { + jest.setTimeout(300_000); + } + const options = useLocalHost ? { l1RpcUrl: 'http://127.0.0.1:8545' } : {}; + + // We need the very first node to be the sequencer for this is the one doing everything throughout the setup. + // Without it we will wait forever. + const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); + + const initialValidators = [EthAddress.fromString(account.address)]; + + ({ teardown, config, logger, deployL1ContractsValues} = await setup(0, { + initialValidators, + l1BlockTime: ETHEREUM_SLOT_DURATION, + salt: 420, + ...options, + })); + + bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); + bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + + config.minTxsPerBlock = 1; + config.maxTxsPerBlock = NUM_TXS_PER_BLOCK; // + + const rollup = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: deployL1ContractsValues.walletClient, + }); + + for (let i = 0; i < NUM_NODES; i++) { + const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); + await rollup.write.addValidator([account.address]); + logger.debug(`Adding ${account.address} as validator`); + } + + // Remove the initial sequencer from the set! This was the sequencer we used for perform the setup. 
+ logger.debug(`Removing ${account.address} as validator`); + const txHash = await rollup.write.removeValidator([account.address]); + + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); + + //@note Now we jump ahead to the next epoch such that the validator committee is picked + // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! + // Which means that the validator set will still be empty! So anyone can propose. + const slotsInEpoch = await rollup.read.EPOCH_DURATION(); + const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); + const cheatCodes = new EthCheatCodes(config.l1RpcUrl); + try { + await cheatCodes.warp(Number(timestamp)); + } catch (err) { + logger.debug('Warp failed, time already satisfied'); + } + + // Send and await a tx to make sure we mine a block for the warp to correctly progress. + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), + }); + }); + + const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { + for (const node of nodes) { + await node.stop(); + } + await bootstrap.stop(); + }; + + afterEach(() => teardown()); + + afterAll(() => { + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + } + }); + + it.only('should rollup txs from all peers with re-execution', async () => { + // create the bootstrap node for the network + if (!bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + + config.validatorReEx = true; + + // create our network of nodes and submit txs into each of them + // the number of txs per node and the number of txs per rollup + // should be set so that the only way for rollups to be built + // is if the txs are successfully gossiped around the nodes. + const nodes: AztecNodeService[] = await createNodes( + config, + PEER_ID_PRIVATE_KEYS, + bootstrapNodeEnr, + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); + + // spam = await SpamContract.deploy(wallet).send().deployed(); + + // wait a bit for peers to discover each other + await sleep(4000); + + // Just use the first node for the PXE + const pxeService = await createPXEService(nodes[0], getRpcConfig(), true); + + // tODO: use a tx with nested calls + const txs = await submitTxsTo(pxeService, NUM_TXS_PER_BLOCK); + nodes.forEach(node => { + node.getSequencer()?.updateSequencerConfig( { + minTxsPerBlock: NUM_TXS_PER_BLOCK, + maxTxsPerBlock: NUM_TXS_PER_BLOCK, + }); + }); + + // now ensure that all txs were successfully mined + await Promise.all( + txs.map(async (tx, i) => { + logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ); + + // shutdown all nodes. 
+ await stopNodes(bootstrapNode, nodes); + }); + + // submits a set of transactions to the provided Private eXecution Environment (PXE) + const submitTxsTo = async (pxe: PXEService, numTxs: number) => { + const txs: SentTx[] = []; + + // TODO: set min tx to 1 and see the re-ex padding + + const wallet = (await createAccounts(pxe, 2))[0]; + + // add another account creation to pad the tx below - TODO(md): clean up + createAccounts(pxe,1); + token = await TokenContract.deploy(wallet, wallet.getAddress(), 'TestToken', 'TST', 18n).send().deployed(); + // spam = await SpamContract.deploy(wallet).send().deployed(); + + for (let i = 0; i < numTxs; i++) { + const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); + + // TODO: check out batch call for deployments + + // Send a public mint tx - this will be minted from the token contract to the pxe account + const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() + // const tx = spam.methods.spam(420n, 15, true).send() + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; + }; +}); diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index dd798f38b35..ddc2c542150 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -44,6 +44,7 @@ import { VK_TREE_HEIGHT, type VerificationKeyAsFields, type VerificationKeyData, + getNonEmptyItems, } from '@aztec/circuits.js'; import { assertPermutation, makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; @@ -123,6 +124,7 @@ export async function buildBaseRollupInput( tx.data.end.nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT, ); + if (nullifierWitnessLeaves === undefined) { throw new Error(`Could not craft nullifier batch insertion proofs`); } @@ -424,6 +426,9 @@ export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: Merkl const allPublicDataWrites = allPublicDataUpdateRequests.map( ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); + + console.log("number of public data writes", allPublicDataWrites.length); + console.log("all public data writes", getNonEmptyItems(allPublicDataWrites)); const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), diff --git a/yarn-project/sequencer-client/src/block_builder/light.ts b/yarn-project/sequencer-client/src/block_builder/light.ts index 7443087bd6a..074734168da 100644 --- a/yarn-project/sequencer-client/src/block_builder/light.ts +++ b/yarn-project/sequencer-client/src/block_builder/light.ts @@ -45,7 +45,7 @@ export class LightweightBlockBuilder implements BlockSimulator { constructor(private db: MerkleTreeOperations, private telemetry: TelemetryClient) {} async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - this.logger.verbose('Starting new block', { numTxs, globalVariables, l1ToL2Messages }); + this.logger.verbose('Starting new block', { 
numTxs, globalVariables: globalVariables.toJSON(), l1ToL2Messages }); this.numTxs = numTxs; this.globalVariables = globalVariables; this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index bb940150f0d..afbfa7ce767 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,13 +1,18 @@ // A minimal version of the public processor - that does not have the fluff -import { MerkleTreeId, Tx, TxValidator, WorldStateSynchronizer } from "@aztec/circuit-types"; -import { Gas, GlobalVariables, Header, Nullifier } from "@aztec/circuits.js"; +import { MerkleTreeId, PublicDataWrite, Tx, TxValidator, WorldStateSynchronizer } from "@aztec/circuit-types"; +import { AztecAddress, Gas, getNonEmptyItems, GlobalVariables, Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, ScopedNullifier } from "@aztec/circuits.js"; import { MerkleTreeOperations } from "@aztec/world-state"; import { WorldStateDB } from "./public_db_sources.js"; import { PublicExecutor } from "./executor.js"; import { TelemetryClient } from "@aztec/telemetry-client"; import { PublicExecutionResult } from "./execution.js"; import { ContractDataSource } from "@aztec/types/contracts"; +import { padArrayEnd } from "@aztec/foundation/collection"; +import { Fr } from "@aztec/foundation/fields"; +import { siloNoteHash, siloNullifier } from "@aztec/circuits.js/hash"; +import { buffer } from "stream/consumers"; +import { makeTuple } from "@aztec/foundation/array"; export class LightPublicProcessorFactory { constructor( @@ -85,48 +90,115 @@ export class LightPublicProcessor { ]); } + // Execution invariants + // If any of the public calls are invalid, then we need to roll them back public async process(txs: Tx[]) { + // TODO(md): Note these will need to padded to a power of 2 txs = txs.map(tx => Tx.clone(tx)); // TODO: maybe there is some extra validation to make sure that we do not overrun // the checks that the kernel should have been doing? 
// If not we add them to the vm execution - const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + console.log("validating txs"); + // TODO(md): skiping tx validation for now as validator is not well defined + // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + + // // If any of the transactions are invalid, we throw an error + // if (invalidTxs.length > 0) { + // console.log("invalid txs found"); + // throw new InvalidTransactionsFound(); + // } + for (const tx of txs) { + if (tx.hasPublicCalls()) { + const publicExecutionResults = await this.executePublicCalls(tx); + // TODO: handle this + if (!publicExecutionResults) { + throw new Error("Public execution results are undefined"); + } - // If any of the transactions are invalid, we throw an error - if (invalidTxs.length > 0) { - throw new InvalidTransactionsFound(); - } + // TODO: throw if this is not defined + if (tx.data.forPublic) { + // tODO: put this in a function + const {nullifiers, newNoteHashes, publicDataWrites} = publicExecutionResults; + const {nullifiers: nonRevertibleNullifiers, noteHashes: nonRevertibleNoteHashes, publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests} = tx.data.forPublic!.endNonRevertibleData; + const {nullifiers: txNullifiers, noteHashes: txNoteHashes, publicDataUpdateRequests: txPublicDataUpdateRequests} = tx.data.forPublic!.end; - for (const tx of txs) { - // We have already validated the txs, so we can just apply the state updates + const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); + const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); + const publicNullifiers = nullifiers.map(n => siloNullifier(n.contractAddress, n.value)); - // We always apply the private part of a transaction - await this.applyPrivateStateUpdates(tx); + const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => siloNoteHash(n.contractAddress, n.noteHash.value)); + const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => siloNoteHash(n.contractAddress, n.noteHash.value)); + const publicNoteHashes = newNoteHashes.map(n => n.value); - // If the transaction has public calls, we then apply the public state upates - if (tx.hasPublicCalls()) { - await this.executePublicCalls(tx); + const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); + const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter(p => !p.isEmpty()); + const nonEmptyPublicDataWrites = publicDataWrites.filter(p => !p.isEmpty()); + + const allNullifiers = padArrayEnd([...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()); + const allNoteHashes = padArrayEnd([...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], Fr.ZERO, MAX_NOTE_HASHES_PER_TX); + const allPublicDataUpdateRequests = padArrayEnd([...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites], PublicDataUpdateRequest.empty(), MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX); + + // TODO: refactor this + const allPublicDataWrites = allPublicDataUpdateRequests.map( + ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), + ); + + // TODO: i think im going to need to silo these + await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, allNoteHashes); + await 
this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, allNullifiers, NULLIFIER_SUBTREE_HEIGHT); + + console.log("number of public data writes", allPublicDataWrites.length); + console.log("all public data writes", getNonEmptyItems(allPublicDataWrites)); + const beingWritten = allPublicDataWrites.map(x => x.toBuffer()); + await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, beingWritten, PUBLIC_DATA_SUBTREE_HEIGHT); + + } + } else { + // console.log("root before", await this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE)); + await this.applyPrivateStateUpdates(tx); + + // Apply empty public data writes + console.log("applying empty public data writes"); + const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => PublicDataTreeLeaf.empty()); + await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, emptyPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); + // console.log("root after", await this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE)); + // console.log("applied private state updates"); } } } + // Ok: this is going to be done in the larger light block builder???? + // Theres no point in doing it here + // If the public part fails, then we need to rollback the trees here + // Can the public part use commitments that have just been created? + + // There is an assumption based on how the block builder works, that the transactions + // provided here CANNOT revert, else they are not added to the block as the kernels + // will fail + // This will change whenever the vm is changed to be able to revert public async applyPrivateStateUpdates(tx: Tx) { // TODO(md): do the last block number check??? // TODO: read this from forPublic or forRollup??? + // We could probably run this after if the tx reverts + // Does the rollup currently insert reverted transactions?? + let insertionPromises: Promise[] = []; if (tx.data.forRollup) { // TODO: do we insert all or just non empty?? const {nullifiers, noteHashes} = tx.data.forRollup.end; if (nullifiers) { - this.merkleTrees.appendLeaves(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer())); + // TODO: note this returns a few things for the circuits to use + insertionPromises.push(this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT)); } if (noteHashes) { - this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); + insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); } } + + await Promise.all(insertionPromises); } // NOTES: @@ -142,27 +214,38 @@ export class LightPublicProcessor { // - This runs with the public kernel app logic // 3. Teardown Call - This is the public teardown call that does fee refunds // - This runs with the public kernel teardown + await this.worldStateDB.addNewContracts(tx); + const nonRevertibleCalls = tx.getNonRevertiblePublicExecutionRequests(); const publicCalls = tx.getRevertiblePublicExecutionRequests(); const teardownCall = tx.getPublicTeardownExecutionRequest()!; + // TODO(md): use exceptions to manage differing control flow of reverts + // TODO: there is some other stuff done in the public processor // rationalise it and find out where else it can be done const transactionGas = tx.data.constants.txContext.gasSettings.getLimits(); + // Store the successful results for db insertions + // TODO: do we write the new state updates inbetween the calls? 
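  // As written in this patch, no tree writes happen in between the individual enqueued calls:
  // results from the setup, app-logic and teardown phases are only accumulated here, and the
  // note hash / nullifier / public data trees are updated once per tx back in process(), after
  // all three phases have run; a reverted call instead rolls the WorldStateDB back to the last
  // checkpoint (or to commit for non-revertible failures).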
+ const publicExecutionResults: PublicExecutionResult[] = []; + // TODO: Check is trnasaction gas updated where it should? // Execute the non revertible calls + for (const call of nonRevertibleCalls) { const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); if (!this.temporaryDidCallOrNestedCallRevert(res)) { console.log("non revertible call succeeded"); this.addNullifiers(res.nullifiers); + publicExecutionResults.push(res); this.worldStateDB.checkpoint(); } else { - console.log(" call or nested call failed"); - this.worldStateDB.rollbackToCommit(); + console.log("non revertible call or nested call failed"); + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCommit(); // TODO: When this part fails, we want skip to some cleanup part // This probably explains the interesting control flow @@ -170,7 +253,6 @@ export class LightPublicProcessor { } } - console.log("not returning") for (const call of publicCalls) { // Execute the non revertible calls const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); @@ -178,10 +260,12 @@ export class LightPublicProcessor { // TODO: tidy this up and below if (!this.temporaryDidCallOrNestedCallRevert(res)) { this.addNullifiers(res.nullifiers); + publicExecutionResults.push(res); } else { // Similarly, if a teardown call fails, it will revert // back to the setup state - this.worldStateDB.rollbackToCheckpoint(); + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCheckpoint(); } } @@ -190,29 +274,66 @@ export class LightPublicProcessor { if (!this.temporaryDidCallOrNestedCallRevert(res)) { this.addNullifiers(res.nullifiers); + publicExecutionResults.push(res); } else { // Similarly, if a teardown call fails, it will revert // back to the setup state - this.worldStateDB.rollbackToCheckpoint(); + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCheckpoint(); } } // TODO(md): think about bringing the journal model into this - this.worldStateDB.commit(); + await this.worldStateDB.commit(); // On success lower the block gas left -- add a check!!! 
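  // One possible shape for the missing check, assuming Gas exposes a component-wise
  // comparison helper (gtAny is an assumed name here; use whatever Gas actually provides):
  //
  //   if (transactionGas.gtAny(this.blockGasLeft)) {
  //     throw new Error('Not enough block gas left for this tx');
  //   }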
this.blockGasLeft = this.blockGasLeft.sub(transactionGas); + + + return this.aggregatePublicExecutionResults(publicExecutionResults); + } + + // This is just getting the private state updates after executing them + // TODO(md): think about these + aggregatePublicExecutionResults(results: PublicExecutionResult[]) { + let nullifiers: ScopedNullifier[] = []; + let newNoteHashes: NoteHash[] = []; + let publicDataWrites: PublicDataUpdateRequest[] = []; + + for (const res of results) { + // TODO: I assume that these will need to be ordered by their side effect counter + nullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); + newNoteHashes.push(...getNonEmptyItems(res.noteHashes)); + publicDataWrites.push(...getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req))); + + // TODO: do for the nested executions + // for (const nested of res.nestedExecutions) { + // nullifiers.push(...getNonEmptyItems(nested.nullifiers).map(n => n.scope(nested.executionRequest.contractAddress))); + // newNoteHashes.push(...getNonEmptyItems(nested.noteHashes)); + // publicDataWrites.push(...getNonEmptyItems(nested.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(nested.executionRequest.contractAddress, req))); + // } + } + + + const returning = { + nullifiers, + newNoteHashes, + publicDataWrites + }; + console.log("after public execution new nullifiers", returning.nullifiers.filter(n => !n.isEmpty()).map(n => siloNullifier(n.contractAddress, n.value).toBuffer())); + console.log("after public execution new note hashes", returning.newNoteHashes.filter(n => !n.isEmpty()).map(n => n.toBuffer())); + return returning; } addNullifiers(nullifiers: Nullifier[]) { this.pendingNullifiers.push(...nullifiers); } - checkpointOrRollback(reverted: boolean) { + async checkpointOrRollback(reverted: boolean): Promise { if (reverted) { - this.worldStateDB.rollbackToCheckpoint(); + await this.worldStateDB.rollbackToCheckpoint(); } else { - this.worldStateDB.checkpoint(); + await this.worldStateDB.checkpoint(); } } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 2c92053eb91..851ed104d20 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -100,6 +100,9 @@ export class ValidatorClient implements Validator { } catch (error: any) { if (error instanceof TransactionsNotAvailableError) { this.log.error(`Transactions not available, skipping attestation ${error.message}`); + } else { + // TODO(md): this is a catch all error handler + this.log.error(`Failed to attest to proposal: ${error.message}`); } return undefined; } @@ -111,6 +114,7 @@ export class ValidatorClient implements Validator { return this.validationService.attestToProposal(proposal); } + // We do not want to run the private state updates until we know if the public has failed or not async reExecuteTransactions(proposal: BlockProposal) { // TODO(md): currently we are not running the rollups, so cannot calcualte the archive // - use pallas new work to do this @@ -140,41 +144,38 @@ export class ValidatorClient implements Validator { const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState(targetBlockNumber, undefined, header.globalVariables, txValidator); this.log.verbose(`Re-ex: Re-executing transactions`); - const 
txResults = await lightProcessor.process(filteredTransactions as Tx[]); + await lightProcessor.process(filteredTransactions as Tx[]); this.log.verbose(`Re-ex: Re-execution complete`); // TODO: update this to check the archive matches - if (txResults === undefined) { - this.log.error(`Failed to re-execute transactions: ${txs.join(', ')}`); - throw new FailedToReExecuteTransactionsError(txHashes); - } const [ newNullifierTree, newNoteHashTree, newPublicDataTree, - newL1ToL2MessageTree, + // newL1ToL2MessageTree, ] = await lightProcessor.getTreeSnapshots(); // Check the header has this information // TODO: replace with archive check once we have it - if (header.state.l1ToL2MessageTree.root.toBuffer() !== (await newL1ToL2MessageTree).root) { - this.log.error(`Re-ex: l1ToL2MessageTree does not match`); - throw new ReExStateMismatchError(); - } - if (header.state.partial.nullifierTree.root.toBuffer() !== (await newNullifierTree).root) { - this.log.error(`Re-ex: nullifierTree does not match`); + + // TODO: l1 to l2 messages need to be inserted separately at the start + // if (header.state.l1ToL2MessageTree.root.toBuffer() !== (await newL1ToL2MessageTree).root) { + // this.log.error(`Re-ex: l1ToL2MessageTree does not match`); + // throw new ReExStateMismatchError(); + // } + if (!header.state.partial.nullifierTree.root.toBuffer().equals(newNullifierTree.root)) { + this.log.error(`Re-ex: nullifierTree does not match, ${header.state.partial.nullifierTree.root.toBuffer().toString('hex')} !== ${newNullifierTree.root.toString('hex')}`); throw new ReExStateMismatchError(); } - if (header.state.partial.noteHashTree.root.toBuffer() !== (await newNoteHashTree).root) { - this.log.error(`Re-ex: noteHashTree does not match`); + if (!header.state.partial.noteHashTree.root.toBuffer().equals(newNoteHashTree.root)) { + this.log.error(`Re-ex: noteHashTree does not match, ${header.state.partial.noteHashTree.root.toBuffer().toString('hex')} !== ${newNoteHashTree.root.toString('hex')}`); throw new ReExStateMismatchError(); } - if (header.state.partial.publicDataTree.root.toBuffer() !== (await newPublicDataTree).root) { - this.log.error(`Re-ex: publicDataTree does not match`); + if (!header.state.partial.publicDataTree.root.toBuffer().equals(newPublicDataTree.root)) { + this.log.error(`Re-ex: publicDataTree does not match, ${header.state.partial.publicDataTree.root.toBuffer().toString('hex')} !== ${newPublicDataTree.root.toString('hex')}`); throw new ReExStateMismatchError(); } - } /** From 7f927f929b3bb39356ce738cb1d1c5faf08d23fe Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 25 Sep 2024 20:08:26 +0000 Subject: [PATCH 057/123] use spam contract to test re-ex --- .../end-to-end/src/e2e_p2p_reex.test.ts | 12 +-- .../orchestrator/block-building-helpers.ts | 2 - .../src/public/light_public_processor.ts | 84 +++++++++++++------ 3 files changed, 67 insertions(+), 31 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts index 19515210192..50751fec5e4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts @@ -186,18 +186,20 @@ describe('e2e_p2p_network', () => { const wallet = (await createAccounts(pxe, 2))[0]; // add another account creation to pad the tx below - TODO(md): clean up - createAccounts(pxe,1); - token = await TokenContract.deploy(wallet, wallet.getAddress(), 'TestToken', 'TST', 18n).send().deployed(); - // spam = await 
SpamContract.deploy(wallet).send().deployed(); + // token = await TokenContract.deploy(wallet, wallet.getAddress(), 'TestToken', 'TST', 18n).send().deployed(); + spam = await SpamContract.deploy(wallet).send().deployed(); + await createAccounts(pxe,1); + const seed = 1234n + const spamCount = 15 for (let i = 0; i < numTxs; i++) { const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); // TODO: check out batch call for deployments // Send a public mint tx - this will be minted from the token contract to the pxe account - const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() - // const tx = spam.methods.spam(420n, 15, true).send() + // const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() + const tx = spam.methods.spam(seed + BigInt(i * spamCount), spamCount, true).send() const txHash = await tx.getTxHash(); logger.info(`Tx sent with hash ${txHash}`); diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index ddc2c542150..e674fc25e2f 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -427,8 +427,6 @@ export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: Merkl ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); - console.log("number of public data writes", allPublicDataWrites.length); - console.log("all public data writes", getNonEmptyItems(allPublicDataWrites)); const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index afbfa7ce767..0d38e6348aa 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,7 +1,7 @@ // A minimal version of the public processor - that does not have the fluff import { MerkleTreeId, PublicDataWrite, Tx, TxValidator, WorldStateSynchronizer } from "@aztec/circuit-types"; -import { AztecAddress, Gas, getNonEmptyItems, GlobalVariables, Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, ScopedNullifier } from "@aztec/circuits.js"; +import { AztecAddress, Gas, getNonEmptyItems, GlobalVariables, Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, ScopedNoteHash, ScopedNullifier } from "@aztec/circuits.js"; import { MerkleTreeOperations } from "@aztec/world-state"; import { WorldStateDB } from "./public_db_sources.js"; import { PublicExecutor } from "./executor.js"; @@ -123,13 +123,14 @@ export class LightPublicProcessor { const {nullifiers: nonRevertibleNullifiers, noteHashes: nonRevertibleNoteHashes, publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests} = tx.data.forPublic!.endNonRevertibleData; const {nullifiers: txNullifiers, noteHashes: txNoteHashes, publicDataUpdateRequests: 
txPublicDataUpdateRequests} = tx.data.forPublic!.end; - const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); + // tODO: make sure not RE removing empty items const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); - const publicNullifiers = nullifiers.map(n => siloNullifier(n.contractAddress, n.value)); + const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); + const publicNullifiers = getNonEmptyItems(nullifiers); - const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => siloNoteHash(n.contractAddress, n.noteHash.value)); - const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => siloNoteHash(n.contractAddress, n.noteHash.value)); - const publicNoteHashes = newNoteHashes.map(n => n.value); + const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); + const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); + const publicNoteHashes = getNonEmptyItems(newNoteHashes); const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter(p => !p.isEmpty()); @@ -144,11 +145,10 @@ export class LightPublicProcessor { ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); - // TODO: i think im going to need to silo these await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, allNoteHashes); await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, allNullifiers, NULLIFIER_SUBTREE_HEIGHT); - console.log("number of public data writes", allPublicDataWrites.length); + // console.log("number of public data writes", allPublicDataWrites.length); console.log("all public data writes", getNonEmptyItems(allPublicDataWrites)); const beingWritten = allPublicDataWrites.map(x => x.toBuffer()); await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, beingWritten, PUBLIC_DATA_SUBTREE_HEIGHT); @@ -296,35 +296,71 @@ export class LightPublicProcessor { // This is just getting the private state updates after executing them // TODO(md): think about these aggregatePublicExecutionResults(results: PublicExecutionResult[]) { - let nullifiers: ScopedNullifier[] = []; - let newNoteHashes: NoteHash[] = []; - let publicDataWrites: PublicDataUpdateRequest[] = []; + let txCallNewNullifiers: ScopedNullifier[][] = []; + let txCallNewNoteHashes: ScopedNoteHash[][] = []; + let txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; + let j = 0; + // TODO: sort these by side effect counter per request + // THE later ones then go first - which does not make sense to me here for (const res of results) { + let enqueuedCallNewNullifiers = []; + let enqueuedCallNewNoteHashes = []; + let enqueuedCallPublicDataWrites = []; + // TODO: I assume that these will need to be ordered by their side effect counter - nullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); - newNoteHashes.push(...getNonEmptyItems(res.noteHashes)); - publicDataWrites.push(...getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req))); + enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); + enqueuedCallNewNoteHashes.push(...getNonEmptyItems(res.noteHashes).map(n => 
n.scope(res.executionRequest.contractAddress))); + const tempPub = getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req)); + enqueuedCallPublicDataWrites.push(...tempPub); + + console.log("public data writes", tempPub); // TODO: do for the nested executions - // for (const nested of res.nestedExecutions) { - // nullifiers.push(...getNonEmptyItems(nested.nullifiers).map(n => n.scope(nested.executionRequest.contractAddress))); - // newNoteHashes.push(...getNonEmptyItems(nested.noteHashes)); - // publicDataWrites.push(...getNonEmptyItems(nested.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(nested.executionRequest.contractAddress, req))); - // } + let i = 0; + for (const nested of res.nestedExecutions) { + console.log("nested execution", i++); + console.log("first public data write", nested.contractStorageUpdateRequests[0]); + + const newNullifiers = getNonEmptyItems(nested.nullifiers).map(n => n.scope(nested.executionRequest.contractAddress)); + const newNoteHashes = getNonEmptyItems(nested.noteHashes).map(n => n.scope(nested.executionRequest.contractAddress)); + + enqueuedCallNewNullifiers.push(...newNullifiers); + enqueuedCallNewNoteHashes.push(...newNoteHashes); + enqueuedCallPublicDataWrites.push(...getNonEmptyItems(nested.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(nested.executionRequest.contractAddress, req))); + } + + enqueuedCallNewNullifiers = this.sortBySideEffectCounter(enqueuedCallNewNullifiers); + enqueuedCallNewNoteHashes = this.sortBySideEffectCounter(enqueuedCallNewNoteHashes); + enqueuedCallPublicDataWrites = this.sortBySideEffectCounter(enqueuedCallPublicDataWrites); + + txCallNewNullifiers.push(enqueuedCallNewNullifiers); + txCallNewNoteHashes.push(enqueuedCallNewNoteHashes); + txCallPublicDataWrites.push(enqueuedCallPublicDataWrites); } + // Reverse + // TODO: WHY to we need to do this? 
yucky yucky, reversal doesnt feel quite right + const newNullifiers = txCallNewNullifiers.reverse().flat(); + const newNoteHashes = txCallNewNoteHashes.reverse().flat(); + const newPublicDataWrites = txCallPublicDataWrites.reverse().flat(); const returning = { - nullifiers, - newNoteHashes, - publicDataWrites + nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), + newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), + publicDataWrites: newPublicDataWrites }; - console.log("after public execution new nullifiers", returning.nullifiers.filter(n => !n.isEmpty()).map(n => siloNullifier(n.contractAddress, n.value).toBuffer())); - console.log("after public execution new note hashes", returning.newNoteHashes.filter(n => !n.isEmpty()).map(n => n.toBuffer())); + console.log("after public execution new nullifiers", returning.nullifiers); + console.log("after public execution new note hashes", returning.newNoteHashes); return returning; } + // Sort by side effect counter, where the lowest is first + // TODO: refactor + sortBySideEffectCounter(items: T[]) { + return items.sort((a, b) => a.counter - b.counter); + } + addNullifiers(nullifiers: Nullifier[]) { this.pendingNullifiers.push(...nullifiers); } From 402374679b6809b6e897b655786357079264682b Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 30 Sep 2024 10:28:49 +0000 Subject: [PATCH 058/123] temp --- .../contracts/spam_contract/src/main.nr | 35 ++++++++++++++++++- .../end-to-end/src/e2e_p2p_reex.test.ts | 9 +++-- .../src/sequencer/sequencer.ts | 3 ++ .../src/public/light_public_processor.ts | 15 -------- .../validator-client/src/validator.ts | 6 ++-- 5 files changed, 47 insertions(+), 21 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr index e6206501fee..2afd1a2f3b2 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr @@ -2,6 +2,8 @@ mod types; use dep::aztec::macros::aztec; +global MAX_EXTERNAL_CALL_SPAM = 4; + // A contract used for testing a random hodgepodge of small features from simulator and end-to-end tests. 
#[aztec] contract Spam { @@ -29,7 +31,7 @@ contract Spam { } #[private] - fn spam(nullifier_seed: Field, nullifier_count: u32, call_public: bool) { + fn spam(nullifier_seed: Field, nullifier_count: u32, call_public: bool, call_recursive_spam: bool) { let caller = context.msg_sender(); let caller_keys = get_public_keys(caller); let amount = U128::from_integer(1); @@ -58,6 +60,14 @@ contract Spam { MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX ).enqueue(&mut context); } + + if (call_recursive_spam) { + Spam::at(context.this_address()).public_spam_with_external_call(0, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL).enqueue(&mut context); + Spam::at(context.this_address()).public_spam_with_external_call( + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + ).enqueue(&mut context); + } } #[public] @@ -69,4 +79,27 @@ contract Spam { storage.public_balances.at(i as Field).write(prev + one); } } + + // TODO(md): add something to the public spam such that we can test further enqueued calls + #[public] + #[internal] + fn public_spam_with_external_call(start: u32, end: u32) { + let one = U128::from_integer(1); + for i in start..end { + let prev = storage.public_balances.at(i as Field).read(); + storage.public_balances.at(i as Field).write(prev + one); + } + Spam::at(context.this_address()).recursive_spam(0).enqueue(&mut context); + } + + #[public] + fn recursive_spam(counter: u32) { + let one = U128::from_integer(1); + if (counter < MAX_EXTERNAL_CALL_SPAM) { + let prev = storage.public_balances.at(counter as Field).read(); + storage.public_balances.at(counter as Field).write(prev + one); + + Spam::at(context.this_address()).external_spam(counter + 1).call(&mut context); + } + } } diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts index 50751fec5e4..6d5e64015b2 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts @@ -32,13 +32,13 @@ import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; import { createAccounts } from '@aztec/accounts/testing'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 2; -const NUM_TXS_PER_BLOCK = 2; -const NUM_TXS_PER_NODE = 2; +const NUM_NODES = 4; +const NUM_TXS_PER_BLOCK = 4; const BOOT_NODE_UDP_PORT = 40400; const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); +// TODO: set up a test snapshot to make the iteration speed faster on this set of tests describe('e2e_p2p_network', () => { let config: AztecNodeConfig; let logger: DebugLogger; @@ -47,6 +47,9 @@ describe('e2e_p2p_network', () => { let bootstrapNodeEnr: string; let deployL1ContractsValues: DeployL1Contracts; + + let snapshotManager: ISnapshotManager; + let token: TokenContract; let spam: SpamContract; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 0195d1afc3a..51465e6463d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -435,9 +435,12 @@ export class Sequencer { const blockBuilder = this.blockBuilderFactory.create(this.worldState.getLatest()); await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); + const timer = new Timer(); const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => processor.process(validTxs, blockSize, blockBuilder, 
this.txValidatorFactory.validatorForProcessedTxs()), ); + this.log.info(`Public processor duration ${timer.ms()}ms`); + if (failedTxs.length > 0) { const failedTxData = failedTxs.map(fail => fail.tx); this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index 0d38e6348aa..62ac23fef69 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -99,7 +99,6 @@ export class LightPublicProcessor { // TODO: maybe there is some extra validation to make sure that we do not overrun // the checks that the kernel should have been doing? // If not we add them to the vm execution - console.log("validating txs"); // TODO(md): skiping tx validation for now as validator is not well defined // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); @@ -148,22 +147,16 @@ export class LightPublicProcessor { await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, allNoteHashes); await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, allNullifiers, NULLIFIER_SUBTREE_HEIGHT); - // console.log("number of public data writes", allPublicDataWrites.length); - console.log("all public data writes", getNonEmptyItems(allPublicDataWrites)); const beingWritten = allPublicDataWrites.map(x => x.toBuffer()); await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, beingWritten, PUBLIC_DATA_SUBTREE_HEIGHT); } } else { - // console.log("root before", await this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE)); await this.applyPrivateStateUpdates(tx); // Apply empty public data writes - console.log("applying empty public data writes"); const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => PublicDataTreeLeaf.empty()); await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, emptyPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); - // console.log("root after", await this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE)); - // console.log("applied private state updates"); } } @@ -238,12 +231,10 @@ export class LightPublicProcessor { const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); if (!this.temporaryDidCallOrNestedCallRevert(res)) { - console.log("non revertible call succeeded"); this.addNullifiers(res.nullifiers); publicExecutionResults.push(res); this.worldStateDB.checkpoint(); } else { - console.log("non revertible call or nested call failed"); await this.worldStateDB.removeNewContracts(tx); await this.worldStateDB.rollbackToCommit(); @@ -314,13 +305,9 @@ export class LightPublicProcessor { const tempPub = getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req)); enqueuedCallPublicDataWrites.push(...tempPub); - console.log("public data writes", tempPub); - // TODO: do for the nested executions let i = 0; for (const nested of res.nestedExecutions) { - console.log("nested execution", i++); - console.log("first public data write", nested.contractStorageUpdateRequests[0]); const newNullifiers = getNonEmptyItems(nested.nullifiers).map(n => n.scope(nested.executionRequest.contractAddress)); const newNoteHashes = getNonEmptyItems(nested.noteHashes).map(n => 
n.scope(nested.executionRequest.contractAddress)); @@ -350,8 +337,6 @@ export class LightPublicProcessor { newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), publicDataWrites: newPublicDataWrites }; - console.log("after public execution new nullifiers", returning.nullifiers); - console.log("after public execution new note hashes", returning.newNoteHashes); return returning; } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index dc16b63a115..0c4c3b6a8d1 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -19,6 +19,7 @@ import { import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; import { LightPublicProcessor, LightPublicProcessorFactory } from '@aztec/simulator'; +import { Timer } from '@aztec/foundation/timer'; export interface Validator { start(): Promise; @@ -144,10 +145,11 @@ export class ValidatorClient implements Validator { const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState(targetBlockNumber, undefined, header.globalVariables, txValidator); this.log.verbose(`Re-ex: Re-executing transactions`); + const timer = new Timer(); await lightProcessor.process(filteredTransactions as Tx[]); - this.log.verbose(`Re-ex: Re-execution complete`); + this.log.verbose(`Re-ex: Re-execution complete ${timer.ms()}ms`); - // TODO: update this to check the archive matches + // TODO(md): update this to check the archive matches const [ newNullifierTree, From a7416ad2ef1f65fd4b4b71169ddfec9cf5f92bb0 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 30 Sep 2024 15:35:40 +0000 Subject: [PATCH 059/123] temp --- .../contracts/spam_contract/src/main.nr | 12 +- .../end-to-end/src/e2e_p2p_network.test.ts | 2 +- .../end-to-end/src/e2e_p2p_reex.test.ts | 180 ++++++++++++------ .../end-to-end/src/e2e_synching.test.ts | 9 +- .../end-to-end/src/fixtures/setup_p2p_test.ts | 16 +- .../src/fixtures/snapshot_manager.ts | 34 +++- 6 files changed, 169 insertions(+), 84 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr index 2afd1a2f3b2..a8376de06f3 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr @@ -2,12 +2,13 @@ mod types; use dep::aztec::macros::aztec; -global MAX_EXTERNAL_CALL_SPAM = 4; // A contract used for testing a random hodgepodge of small features from simulator and end-to-end tests. 
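+// It also exercises nested public calls: recursive_spam (below) bounds its call chain with
+// MAX_EXTERNAL_CALL_SPAM, writing one public balance slot per hop and then calling itself with
+// counter + 1 until the bound is reached.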
#[aztec] contract Spam { + global MAX_EXTERNAL_CALL_SPAM = 4; + use dep::aztec::{ prelude::{Map, AztecAddress, PublicMutable}, encrypted_logs::{encrypted_note_emission::encode_and_encrypt_note_unconstrained}, @@ -63,10 +64,6 @@ contract Spam { if (call_recursive_spam) { Spam::at(context.this_address()).public_spam_with_external_call(0, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL).enqueue(&mut context); - Spam::at(context.this_address()).public_spam_with_external_call( - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX - ).enqueue(&mut context); } } @@ -89,7 +86,8 @@ contract Spam { let prev = storage.public_balances.at(i as Field).read(); storage.public_balances.at(i as Field).write(prev + one); } - Spam::at(context.this_address()).recursive_spam(0).enqueue(&mut context); + + Spam::at(context.this_address()).recursive_spam(0).call(&mut context); } #[public] @@ -99,7 +97,7 @@ contract Spam { let prev = storage.public_balances.at(counter as Field).read(); storage.public_balances.at(counter as Field).write(prev + one); - Spam::at(context.this_address()).external_spam(counter + 1).call(&mut context); + Spam::at(context.this_address()).recursive_spam(counter + 1).call(&mut context); } } } diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 544b319acae..1b0177c2a74 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -132,7 +132,7 @@ describe('e2e_p2p_network', () => { // create our network of nodes and submit txs into each of them // the number of txs per node and the number of txs per rollup // should be set so that the only way for rollups to be built - // is if the txs are successfully gossiped around the nodes. + // is if the txs are successfully re-executed. 
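+    // Concretely, with the constants declared at the top of this file each node only submits
+    // NUM_TXS_PER_NODE (2) txs while minTxsPerBlock is set to NUM_TXS_PER_BLOCK (4), so no single
+    // node's local mempool can fill a block on its own.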
const contexts: NodeContext[] = []; const nodes: AztecNodeService[] = await createNodes( config, diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts index 6d5e64015b2..02765198386 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts @@ -26,84 +26,149 @@ import { SpamContract, TokenContract } from '@aztec/noir-contracts.js'; import { createBootstrapNode, createNodes, + generatePeerIdPrivateKey, generatePeerIdPrivateKeys, } from './fixtures/setup_p2p_test.js'; import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; import { createAccounts } from '@aztec/accounts/testing'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { addAccounts, createSnapshotManager, ISnapshotManager, SubsystemsContext } from './fixtures/snapshot_manager.js'; +import { getRandomPort } from '@aztec/foundation/testing'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 4; +const NUM_NODES = 2; const NUM_TXS_PER_BLOCK = 4; const BOOT_NODE_UDP_PORT = 40400; const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); -// TODO: set up a test snapshot to make the iteration speed faster on this set of tests +// const { E2E_DATA_PATH: dataPath } = process.env; +const dataPath = "./data-e2e_p2p_reex"; + +class ReExTest { + private snapshotManager: ISnapshotManager; + private logger: DebugLogger; + private accounts: CompleteAddress[] = []; + + constructor(testName: string) { + this.logger = createDebugLogger(`aztec:e2e_p2p_reex:${testName}`); + this.snapshotManager = createSnapshotManager(`e2e_p2p_reex/${testName}`, dataPath); + } + + async applyBaseSnapshot() { + await this.snapshotManager.snapshot("register validators", async () => { + + + + } ) + } +} + +// TODO: really need to setup a snapshot for this describe('e2e_p2p_network', () => { - let config: AztecNodeConfig; + let ctx: SubsystemsContext; let logger: DebugLogger; let teardown: () => Promise; let bootstrapNode: BootstrapNode; let bootstrapNodeEnr: string; - let deployL1ContractsValues: DeployL1Contracts; - + let wallet: AccountWalletWithSecretKey; let snapshotManager: ISnapshotManager; - let token: TokenContract; let spam: SpamContract; beforeEach(async () => { - // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` - const useLocalHost = false; - if (useLocalHost) { - jest.setTimeout(300_000); - } - const options = useLocalHost ? { l1RpcUrl: 'http://127.0.0.1:8545' } : {}; + logger = createDebugLogger('aztec:e2e_p2p_reex'); - // We need the very first node to be the sequencer for this is the one doing everything throughout the setup. - // Without it we will wait forever. 
- const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); + bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); + bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + + // Put the original node on the bootstrap list so that it transactions we send to it are gossiped to other nodes + const port = await getRandomPort() || BOOT_NODE_UDP_PORT - 1; + const peerIdPrivateKey = generatePeerIdPrivateKey(); + const bootstrapConfig = { + p2pEnabled: true, + bootstrapNodes: [bootstrapNodeEnr], + peerIdPrivateKey, + udpListenAddress: `0.0.0.0:${port}`, + tcpListenAddress: `0.0.0.0:${port}`, + tcpAnnounceAddress: `127.0.0.1:${port}`, + udpAnnounceAddress: `127.0.0.1:${port}`, + }; + const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); const initialValidators = [EthAddress.fromString(account.address)]; + // If we give the initial node a bootstrap node, then we can attempt to connect through it - ({ teardown, config, logger, deployL1ContractsValues} = await setup(0, { + snapshotManager = createSnapshotManager(`e2e_p2p_reex`, dataPath, bootstrapConfig, { + assumeProvenThrough: undefined, initialValidators, - l1BlockTime: ETHEREUM_SLOT_DURATION, - salt: 420, - ...options, - })); + }); - bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); - bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + await snapshotManager.snapshot("setup-account", addAccounts(1, logger, false), async ({accountKeys}, ctx) => { + const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); + await Promise.all(accountManagers.map(a => a.register())); + const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + wallet = wallets[0]; + }); - config.minTxsPerBlock = 1; - config.maxTxsPerBlock = NUM_TXS_PER_BLOCK; // + await snapshotManager.snapshot("add-spam-contract", async () => { + const spamContract = await SpamContract.deploy(wallet).send().deployed(); + return {contractAddress: spamContract.address}; + }, async ({contractAddress}) => { + spam = await SpamContract.at(contractAddress, wallet); + }); + // await snapshotManager.snapshot( + // "Add Nodes as validators", + // async ({deployL1ContractsValues}) => { + // const rollup = getContract({ + // address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + // abi: RollupAbi, + // client: deployL1ContractsValues.walletClient, + // }); + + // As a snapshot gets sent before hand, our account nonce may have increased. + // const nonce = await deployL1ContractsValues.publicClient.getTransactionCount({ address: }); + // const senderAddress = deployL1ContractsValues.walletClient.account.address; + // const nonce = await deployL1ContractsValues.publicClient.getTransactionCount({ address: senderAddress }); + // for (let i = 0; i < NUM_NODES; i++) { + // const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); + // // await rollup.write.addValidator([account.address], { nonce: nonce + i }); + // // console.log("nonce", nonce + i); + // await rollup.write.addValidator([account.address]); + // } + // } + // No need to do anything after setup is complete + // ) + + ctx = await snapshotManager.setup(); + + // TODO: could i setup this timejump? + //@note Now we jump ahead to the next epoch such that the validator committee is picked + // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! + // Which means that the validator set will still be empty! 
So anyone can propose. const rollup = getContract({ - address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + address: ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), abi: RollupAbi, - client: deployL1ContractsValues.walletClient, + client: ctx.deployL1ContractsValues.walletClient, }); + // TODO(md): we want this also in a snapshot but i am having nonce issues for (let i = 0; i < NUM_NODES; i++) { const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); + // await rollup.write.addValidator([account.address], { nonce: nonce + i }); + // console.log("nonce", nonce + i); await rollup.write.addValidator([account.address]); - logger.debug(`Adding ${account.address} as validator`); } - // Remove the initial sequencer from the set! This was the sequencer we used for perform the setup. - logger.debug(`Removing ${account.address} as validator`); - const txHash = await rollup.write.removeValidator([account.address]); - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); + ctx.aztecNodeConfig.minTxsPerBlock = 1; + ctx.aztecNodeConfig.maxTxsPerBlock = NUM_TXS_PER_BLOCK; // - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. const slotsInEpoch = await rollup.read.EPOCH_DURATION(); const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(config.l1RpcUrl); + const cheatCodes = new EthCheatCodes(ctx.aztecNodeConfig.l1RpcUrl); try { await cheatCodes.warp(Number(timestamp)); } catch (err) { @@ -111,9 +176,11 @@ describe('e2e_p2p_network', () => { } // Send and await a tx to make sure we mine a block for the warp to correctly progress. - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), + await ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await ctx.deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), }); + + }); const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { @@ -137,30 +204,36 @@ describe('e2e_p2p_network', () => { throw new Error('Bootstrap node ENR is not available'); } - config.validatorReEx = true; + ctx.aztecNodeConfig.validatorReEx = true; // create our network of nodes and submit txs into each of them // the number of txs per node and the number of txs per rollup // should be set so that the only way for rollups to be built // is if the txs are successfully gossiped around the nodes. 
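+    // With validatorReEx enabled above, attesting validators are expected to re-execute the
+    // proposal's transactions (the "Re-ex" path in ValidatorClient / LightPublicProcessor) before
+    // signing, so a block should only be produced if re-execution also succeeds on the peer nodes.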
+ const nodes: AztecNodeService[] = await createNodes( - config, + ctx.aztecNodeConfig, PEER_ID_PRIVATE_KEYS, bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); + + + console.log("writing nodes ", nodes.length); + for (const node of nodes) { + const enr = await node.getEncodedEnr(); + console.log("node enr", enr?.toString()); + } + // spam = await SpamContract.deploy(wallet).send().deployed(); // wait a bit for peers to discover each other await sleep(4000); - // Just use the first node for the PXE - const pxeService = await createPXEService(nodes[0], getRpcConfig(), true); - // tODO: use a tx with nested calls - const txs = await submitTxsTo(pxeService, NUM_TXS_PER_BLOCK); + const txs = await submitTxsTo(NUM_TXS_PER_BLOCK); nodes.forEach(node => { node.getSequencer()?.updateSequencerConfig( { minTxsPerBlock: NUM_TXS_PER_BLOCK, @@ -181,28 +254,17 @@ describe('e2e_p2p_network', () => { }); // submits a set of transactions to the provided Private eXecution Environment (PXE) - const submitTxsTo = async (pxe: PXEService, numTxs: number) => { + const submitTxsTo = async (numTxs: number) => { const txs: SentTx[] = []; - // TODO: set min tx to 1 and see the re-ex padding - - const wallet = (await createAccounts(pxe, 2))[0]; - - // add another account creation to pad the tx below - TODO(md): clean up - // token = await TokenContract.deploy(wallet, wallet.getAddress(), 'TestToken', 'TST', 18n).send().deployed(); - spam = await SpamContract.deploy(wallet).send().deployed(); - await createAccounts(pxe,1); - const seed = 1234n const spamCount = 15 for (let i = 0; i < numTxs; i++) { - const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); - // TODO: check out batch call for deployments // Send a public mint tx - this will be minted from the token contract to the pxe account // const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() - const tx = spam.methods.spam(seed + BigInt(i * spamCount), spamCount, true).send() + const tx = spam.methods.spam(seed + BigInt(i * spamCount), spamCount, false, true ).send() const txHash = await tx.getTxHash(); logger.info(`Tx sent with hash ${txHash}`); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 18b1f7b4285..e70b6dbddfe 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -11,7 +11,6 @@ * * To run the Setup run with the `AZTEC_GENERATE_TEST_DATA=1` flag. Without * this flag, we will run in execution. - * * There is functionality to store the `stats` of a sync, but currently we * will simply be writing it to the log instead. 
* @@ -277,10 +276,10 @@ class TestVariant { const txs = []; for (let i = 0; i < this.txCount; i++) { const batch = new BatchCall(this.wallets[i], [ - this.spam.methods.spam(this.seed, 16, false).request(), - this.spam.methods.spam(this.seed + 16n, 16, false).request(), - this.spam.methods.spam(this.seed + 32n, 16, false).request(), - this.spam.methods.spam(this.seed + 48n, 15, true).request(), + this.spam.methods.spam(this.seed, 16, false, false).request(), + this.spam.methods.spam(this.seed + 16n, 16, false, false).request(), + this.spam.methods.spam(this.seed + 32n, 16, false, false).request(), + this.spam.methods.spam(this.seed + 48n, 15, true, false).request(), ]); this.seed += 100n; diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 99cdf0aa007..5fef1dcf6d0 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -21,11 +21,15 @@ export interface NodeContext { account: AztecAddress; } +export function generatePeerIdPrivateKey(): string { + // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ + return '08021220' + generatePrivateKey().substr(2, 66); +} + export function generatePeerIdPrivateKeys(numberOfPeers: number): string[] { const peerIdPrivateKeys = []; for (let i = 0; i < numberOfPeers; i++) { - // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ - peerIdPrivateKeys.push('08021220' + generatePrivateKey().substr(2, 66)); + peerIdPrivateKeys.push(generatePeerIdPrivateKey()); } return peerIdPrivateKeys; } @@ -38,12 +42,12 @@ export async function createNodes( numNodes: number, bootNodePort: number, ): Promise { - const nodes = []; + const nodesPromises = []; for (let i = 0; i < numNodes; i++) { - const node = await createNode(config, peerIdPrivateKeys[i], i + 1 + bootNodePort, bootstrapNodeEnr, i); - nodes.push(node); + const nodePromise = createNode(config, peerIdPrivateKeys[i], i + 1 + bootNodePort, bootstrapNodeEnr, i); + nodesPromises.push(nodePromise); } - return nodes; + return Promise.all(nodesPromises); } // creates a P2P enabled instance of Aztec Node Service diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 2155c660e7a..0a4543f66e7 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -33,14 +33,15 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import getPort from 'get-port'; import { join } from 'path'; -import { type Hex } from 'viem'; -import { mnemonicToAccount } from 'viem/accounts'; +import { getContract, type Hex } from 'viem'; +import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; import { MNEMONIC } from './fixtures.js'; import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { setupL1Contracts } from './setup_l1_contracts.js'; import { deployCanonicalAuthRegistry, deployCanonicalRouter, getPrivateKeyFromIndex } from './utils.js'; +import { RollupAbi } from '@aztec/l1-artifacts'; export type SubsystemsContext = { anvil: Anvil; @@ -65,7 +66,10 @@ export function createSnapshotManager( testName: string, dataPath?: string, config: Partial = {}, - deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER }, + deployL1ContractsArgs: Partial 
= { + assumeProvenThrough: Number.MAX_SAFE_INTEGER, + initialValidators: [], + }, ) { return dataPath ? new SnapshotManager(testName, dataPath, config, deployL1ContractsArgs) @@ -140,7 +144,10 @@ class SnapshotManager implements ISnapshotManager { testName: string, private dataPath: string, private config: Partial = {}, - private deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER }, + private deployL1ContractsArgs: Partial = { + assumeProvenThrough: Number.MAX_SAFE_INTEGER, + initialValidators: [], + }, ) { this.livePath = join(this.dataPath, 'live', testName); this.logger = createDebugLogger(`aztec:snapshot_manager:${testName}`); @@ -288,6 +295,7 @@ async function setupFromFresh( config: Partial = {}, deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER, + initialValidators: [], }, ): Promise { logger.verbose(`Initializing state...`); @@ -317,8 +325,8 @@ async function setupFromFresh( const publisherPrivKeyRaw = hdAccount.getHdKey().privateKey; const publisherPrivKey = publisherPrivKeyRaw === null ? null : Buffer.from(publisherPrivKeyRaw); - const validatorPrivKey = getPrivateKeyFromIndex(1); - const proverNodePrivateKey = getPrivateKeyFromIndex(2); + const validatorPrivKey = getPrivateKeyFromIndex(0); + const proverNodePrivateKey = getPrivateKeyFromIndex(0); aztecNodeConfig.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; aztecNodeConfig.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; @@ -380,6 +388,20 @@ async function setupFromFresh( writeFileSync(`${statePath}/aztec_node_config.json`, JSON.stringify(aztecNodeConfig)); } + // If initial validators are provided, we need to remove them from the node. + if (deployL1ContractsArgs.initialValidators && deployL1ContractsArgs.initialValidators?.length > 0) { + const rollup = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: deployL1ContractsValues.walletClient, + }); + + logger.debug(`Removing ${deployL1ContractsArgs.initialValidators[0].toString()} as validator`); + const txHash = await rollup.write.removeValidator([deployL1ContractsArgs.initialValidators[0].toString()]); + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); + } + + return { aztecNodeConfig, anvil, From c848c898446dc25ba4282c0a847bcba456bb224f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 30 Sep 2024 22:30:26 +0000 Subject: [PATCH 060/123] feat: snapshotting for e2e p2p tests --- .../src/e2e_p2p/gossip_network.test.ts | 76 ++++ .../end-to-end/src/e2e_p2p/p2p_network.ts | 114 ++++++ .../src/e2e_p2p/rediscovery.test.ts | 101 +++++ .../end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 103 ++++++ yarn-project/end-to-end/src/e2e_p2p/shared.ts | 62 ++++ .../end-to-end/src/e2e_p2p_network.test.ts | 345 ------------------ .../end-to-end/src/fixtures/setup_p2p_test.ts | 74 ++-- .../src/fixtures/snapshot_manager.ts | 42 ++- yarn-project/end-to-end/src/fixtures/utils.ts | 2 +- 9 files changed, 530 insertions(+), 389 deletions(-) create mode 100644 yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts create mode 100644 yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts create mode 100644 yarn-project/end-to-end/src/e2e_p2p/shared.ts delete mode 100644 
yarn-project/end-to-end/src/e2e_p2p_network.test.ts diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts new file mode 100644 index 00000000000..af95e133972 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -0,0 +1,76 @@ + +import { type AztecNodeService } from '@aztec/aztec-node'; +import { + sleep, +} from '@aztec/aztec.js'; + +import fs from 'fs'; + +import { + type NodeContext, + createNodes, +} from '../fixtures/setup_p2p_test.js'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 4; +const NUM_TXS_PER_NODE = 2; + +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; + +describe('e2e_p2p_network', () => { + let t: P2PNetworkTest; + + beforeEach(async () => { + t = await P2PNetworkTest.create('e2e_p2p_network', NUM_NODES); + await t.applyBaseSnapshots(); + await t.setup(); + }); + + afterEach(async () => await t.teardown()); + + afterAll(() => { + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + } + }); + + it('should rollup txs from all peers', async () => { + // create the bootstrap node for the network + if (!t.bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + // create our network of nodes and submit txs into each of them + // the number of txs per node and the number of txs per rollup + // should be set so that the only way for rollups to be built + // is if the txs are successfully gossiped around the nodes. + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = await createNodes( + t.ctx.aztecNodeConfig, + t.peerIdPrivateKeys, + t.bootstrapNodeEnr, + NUM_NODES, + ); + + // wait a bit for peers to discover each other + await sleep(4000); + + for (const node of nodes) { + const context = await createPXEServiceAndSubmitTransactions(t.logger, node, NUM_TXS_PER_NODE); + contexts.push(context); + } + + // now ensure that all txs were successfully mined + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ), + ); + + // shutdown all nodes. 
+ await t.stopNodes(nodes); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts new file mode 100644 index 00000000000..e3c35881be2 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -0,0 +1,114 @@ +import { getContract } from "viem"; +import { createSnapshotManager, ISnapshotManager, SubsystemsContext } from "../fixtures/snapshot_manager.js"; +import { RollupAbi } from "@aztec/l1-artifacts"; +import { BootstrapNode } from "@aztec/p2p"; +import { createDebugLogger, DebugLogger } from "@aztec/foundation/log"; +import { createBootstrapNode, createValidatorConfig, generateNodePrivateKeys, generatePeerIdPrivateKeys } from "../fixtures/setup_p2p_test.js"; +import { AztecNodeConfig, AztecNodeService } from "@aztec/aztec-node"; +import { EthCheatCodes } from "@aztec/aztec.js"; +import { EthAddress, ETHEREUM_SLOT_DURATION } from "@aztec/circuits.js"; +import { privateKeyToAccount } from "viem/accounts"; +import { getPrivateKeyFromIndex } from "../fixtures/utils.js"; +import { getRandomPort } from "@aztec/foundation/testing"; + +const BOOT_NODE_UDP_PORT = 404000; + +// TODO: use this eventually +export class P2PNetworkTest { + private snapshotManager: ISnapshotManager; + private baseAccount; + + public logger: DebugLogger; + + public ctx!: SubsystemsContext; + public nodePrivateKeys: `0x${string}`[] = []; + public peerIdPrivateKeys: string[] = []; + + public bootstrapNodeEnr: string = ''; + + constructor(testName: string, public bootstrapNode: BootstrapNode, public bootNodePort: number, private numberOfNodes: number, initialValidatorAddress: string, initialValidatorConfig: AztecNodeConfig) { + this.logger = createDebugLogger(`aztec:e2e_p2p:${testName}`); + + this.baseAccount = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); + this.nodePrivateKeys = generateNodePrivateKeys(1, numberOfNodes); + this.peerIdPrivateKeys = generatePeerIdPrivateKeys(numberOfNodes); + + this.bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + + const initialValidators = [EthAddress.fromString(initialValidatorAddress)]; + + this.snapshotManager = createSnapshotManager(`e2e_p2p_network/${testName}`, process.env.E2E_DATA_PATH, { + ...initialValidatorConfig, + l1BlockTime: ETHEREUM_SLOT_DURATION, + salt: 420, + initialValidators, + }); + } + + static async create(testName: string, numberOfNodes: number) { + const port = await getRandomPort() || BOOT_NODE_UDP_PORT; + const bootstrapNode = await createBootstrapNode(port); + const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); + + const validatorPrivateKey = getPrivateKeyFromIndex(0)!; + const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); + const intiailValidatorAddress = privateKeyToAccount(initialValidatorConfig.publisherPrivateKey).address; + + return new P2PNetworkTest(testName, bootstrapNode, port, numberOfNodes, intiailValidatorAddress, initialValidatorConfig); + } + + async applyBaseSnapshots() { + await this.snapshotManager.snapshot("add-validators", async ({deployL1ContractsValues, aztecNodeConfig}) => { + const rollup = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: deployL1ContractsValues.walletClient, + }); + + for (let i = 0; i < this.numberOfNodes; i++) { + const account = privateKeyToAccount(this.nodePrivateKeys[i]!); + await rollup.write.addValidator([account.address]); + this.logger.debug(`Adding 
${account.address} as validator`); + } + + //@note Now we jump ahead to the next epoch such that the validator committee is picked + // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! + // Which means that the validator set will still be empty! So anyone can propose. + const slotsInEpoch = await rollup.read.EPOCH_DURATION(); + const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); + const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); + try { + await cheatCodes.warp(Number(timestamp)); + } catch (err) { + this.logger.debug('Warp failed, time already satisfied'); + } + + // Send and await a tx to make sure we mine a block for the warp to correctly progress. + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: this.baseAccount.address, value: 1n, account: this.baseAccount }), + }); + }) + + } + + async setup() { + this.ctx = await this.snapshotManager.setup(); + + // TODO(md): make it such that the test can set these up + this.ctx.aztecNodeConfig.minTxsPerBlock = 4; + this.ctx.aztecNodeConfig.maxTxsPerBlock = 4; + } + + async stopNodes(nodes: AztecNodeService[]) { + for (const node of nodes) { + await node.stop(); + } + await this.bootstrapNode.stop(); + } + + async teardown() { + await this.snapshotManager.teardown(); + } + + +} \ No newline at end of file diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts new file mode 100644 index 00000000000..37e02d97507 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -0,0 +1,101 @@ +import { type AztecNodeService } from '@aztec/aztec-node'; +import { + sleep, +} from '@aztec/aztec.js'; +import { type BootstrapNode } from '@aztec/p2p'; + +import fs from 'fs'; + +import { + type NodeContext, + createNode, + createNodes, +} from '../fixtures/setup_p2p_test.js'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 4; +const NUM_TXS_PER_NODE = 2; +const BOOT_NODE_UDP_PORT = 40400; + +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; + +describe('e2e_p2p_rediscovery', () => { + let t: P2PNetworkTest; + + beforeEach(async () => { + t = await P2PNetworkTest.create('e2e_p2p_rediscovery', NUM_NODES); + await t.applyBaseSnapshots(); + await t.setup(); + }); + + afterEach(async () => await t.teardown()); + + afterAll(() => { + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + } + }); + + + it('should re-discover stored peers without bootstrap node', async () => { + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = await createNodes( + t.ctx.aztecNodeConfig, + t.peerIdPrivateKeys, + t.bootstrapNodeEnr, + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); + + // wait a bit for peers to discover each other + await sleep(3000); + + // stop bootstrap node + await t.bootstrapNode.stop(); + + // create new nodes from datadir + const newNodes: AztecNodeService[] = []; + + // stop all nodes + for (let i = 0; i < NUM_NODES; i++) { + const node = nodes[i]; + await node.stop(); + t.logger.info(`Node ${i} stopped`); + await sleep(1200); + // TODO: make a restart nodes function + const newNode = await createNode( + t.ctx.aztecNodeConfig, + t.peerIdPrivateKeys[i], + i + 1 + BOOT_NODE_UDP_PORT, + 
undefined, + i, + `./data-${i}`, + ); + t.logger.info(`Node ${i} restarted`); + newNodes.push(newNode); + } + + // wait a bit for peers to discover each other + await sleep(2000); + + for (const node of newNodes) { + const context = await createPXEServiceAndSubmitTransactions(t.logger, node, NUM_TXS_PER_NODE); + contexts.push(context); + } + + // now ensure that all txs were successfully mined + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ), + ); + + // shutdown all nodes. + await t.stopNodes(newNodes); + }); + +}); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts new file mode 100644 index 00000000000..c79d3162d55 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -0,0 +1,103 @@ +import { type AztecNodeService } from '@aztec/aztec-node'; +import { + sleep, +} from '@aztec/aztec.js'; + +import fs from 'fs'; + +import { jest } from '@jest/globals'; + +import { + type NodeContext, + createNodes, +} from '../fixtures/setup_p2p_test.js'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 4; +const NUM_TXS_PER_NODE = 2; + +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; + +describe('e2e_p2p_reqresp_tx', () => { + let t: P2PNetworkTest; + + beforeEach(async () => { + t = await P2PNetworkTest.create('e2e_p2p_reqresp_tx', NUM_NODES); + await t.applyBaseSnapshots(); + await t.setup(); + }); + + afterEach(async () => await t.teardown()); + + afterAll(() => { + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + } + }); + + // NOTE: If this test fails in a PR where the shuffling algorithm is changed, then it is failing as the node with + // the mocked p2p layer is being picked as the sequencer, and it does not have any transactions in it's mempool. + // If this is the case, then we should update the test to switch off the mempool of a different node. + // adjust `nodeToTurnOffTxGossip` in the test below. + it('should produce an attestation by requesting tx data over the p2p network', async () => { + /** + * Birds eye overview of the test + * 1. We spin up x nodes + * 2. We turn off receiving a tx via gossip from two of the nodes + * 3. We send a transactions and gossip it to other nodes + * 4. The disabled nodes will receive an attestation that it does not have the data for + * 5. They will request this data over the p2p layer + * 6. We receive all of the attestations that we need and we produce the block + * + * Note: we do not attempt to let this node produce a block, as it will not have received any transactions + * from the other pxes. + */ + + if (!t.bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + const contexts: NodeContext[] = []; + const nodes: AztecNodeService[] = await createNodes( + t.ctx.aztecNodeConfig, + t.peerIdPrivateKeys, + t.bootstrapNodeEnr, + NUM_NODES, + ); + + // wait a bit for peers to discover each other + await sleep(4000); + + // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it + // Original implementation of `processTxFromPeer` will store received transactions in the tx pool. 
+ // We have chosen nodes 0,3 as they do not get chosen to be the sequencer in this test. + const nodeToTurnOffTxGossip = [0, 3]; + for (const nodeIndex of nodeToTurnOffTxGossip) { + jest + .spyOn((nodes[nodeIndex] as any).p2pClient.p2pService, 'processTxFromPeer') + .mockImplementation((): Promise => { + return Promise.resolve(); + }); + } + + // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. + // If the shuffling algorithm changes, then this will need to be updated. + for (let i = 0; i < 2; i++) { + const context = await createPXEServiceAndSubmitTransactions(t.logger, nodes[i], NUM_TXS_PER_NODE); + contexts.push(context); + } + + await Promise.all( + contexts.flatMap((context, i) => + context.txs.map(async (tx, j) => { + t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + await tx.wait(); + t.logger.info(`Tx ${i}-${j}: ${await tx.getTxHash()} has been mined`); + return await tx.getTxHash(); + }), + ), + ); + + await t.stopNodes(nodes); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts new file mode 100644 index 00000000000..481a5716344 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -0,0 +1,62 @@ +import { AztecNodeService } from "@aztec/aztec-node"; +import { NodeContext } from "../fixtures/setup_p2p_test.js"; +import { getSchnorrAccount } from "@aztec/accounts/schnorr"; +import { DebugLogger, SentTx } from "@aztec/aztec.js"; +import { TxStatus } from "@aztec/circuit-types"; +import { CompleteAddress, GrumpkinScalar } from "@aztec/circuits.js"; +import { Fr } from "@aztec/foundation/fields"; +import { createPXEService, PXEService ,getPXEServiceConfig as getRpcConfig } from "@aztec/pxe"; + + // creates an instance of the PXE and submit a given number of transactions to it. 
+ export const createPXEServiceAndSubmitTransactions = async ( + logger: DebugLogger, + node: AztecNodeService, + numTxs: number, + ): Promise => { + const rpcConfig = getRpcConfig(); + const pxeService = await createPXEService(node, rpcConfig, true); + + const secretKey = Fr.random(); + const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); + await pxeService.registerAccount(secretKey, completeAddress.partialAddress); + + const txs = await submitTxsTo(logger, pxeService, numTxs); + return { + txs, + account: completeAddress.address, + pxeService, + node, + }; + }; + + // submits a set of transactions to the provided Private eXecution Environment (PXE) + const submitTxsTo = async (logger: DebugLogger, pxe: PXEService, numTxs: number) => { + const txs: SentTx[] = []; + for (let i = 0; i < numTxs; i++) { + // const tx = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()).deploy(); + const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); + const deployMethod = await accountManager.getDeployMethod(); + await deployMethod.create({ + contractAddressSalt: accountManager.salt, + skipClassRegistration: true, + skipPublicDeployment: true, + universalDeploy: true, + }); + await deployMethod.prove({}); + const tx = deployMethod.send(); + + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; + }; \ No newline at end of file diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts deleted file mode 100644 index 6caf8f6bec4..00000000000 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ /dev/null @@ -1,345 +0,0 @@ -import { getSchnorrAccount } from '@aztec/accounts/schnorr'; -import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { - CompleteAddress, - type DebugLogger, - type DeployL1Contracts, - EthCheatCodes, - Fr, - GrumpkinScalar, - type SentTx, - TxStatus, - sleep, -} from '@aztec/aztec.js'; -import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; -import { RollupAbi } from '@aztec/l1-artifacts'; -import { type BootstrapNode } from '@aztec/p2p'; -import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; - -import { jest } from '@jest/globals'; -import fs from 'fs'; -import { getContract } from 'viem'; -import { privateKeyToAccount } from 'viem/accounts'; - -import { - type NodeContext, - createBootstrapNode, - createNode, - createNodes, - generatePeerIdPrivateKeys, -} from './fixtures/setup_p2p_test.js'; -import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; - -// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 4; -const NUM_TXS_PER_BLOCK = 4; -const NUM_TXS_PER_NODE = 2; -const BOOT_NODE_UDP_PORT = 40400; - -const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); - -describe('e2e_p2p_network', () => { - let config: AztecNodeConfig; - let logger: DebugLogger; - let teardown: () => Promise; - let bootstrapNode: BootstrapNode; - let bootstrapNodeEnr: string; - let deployL1ContractsValues: DeployL1Contracts; - - beforeEach(async () => { - // If we want to test 
with interval mining, we can use the local host and start `anvil --block-time 12` - const useLocalHost = false; - if (useLocalHost) { - jest.setTimeout(300_000); - } - const options = useLocalHost ? { l1RpcUrl: 'http://127.0.0.1:8545' } : {}; - - // We need the very first node to be the sequencer for this is the one doing everything throughout the setup. - // Without it we will wait forever. - const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); - - const initialValidators = [EthAddress.fromString(account.address)]; - - ({ teardown, config, logger, deployL1ContractsValues } = await setup(0, { - initialValidators, - l1BlockTime: ETHEREUM_SLOT_DURATION, - salt: 420, - ...options, - })); - - bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); - bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - - config.minTxsPerBlock = NUM_TXS_PER_BLOCK; - config.maxTxsPerBlock = NUM_TXS_PER_BLOCK; - - const rollup = getContract({ - address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: deployL1ContractsValues.walletClient, - }); - - for (let i = 0; i < NUM_NODES; i++) { - const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); - await rollup.write.addValidator([account.address]); - logger.debug(`Adding ${account.address} as validator`); - } - - // Remove the initial sequencer from the set! This was the sequencer we used for perform the setup. - logger.debug(`Removing ${account.address} as validator`); - const txHash = await rollup.write.removeValidator([account.address]); - - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); - - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. - const slotsInEpoch = await rollup.read.EPOCH_DURATION(); - const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(config.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - logger.debug('Warp failed, time already satisfied'); - } - - // Send and await a tx to make sure we mine a block for the warp to correctly progress. - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), - }); - }); - - const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { - for (const node of nodes) { - await node.stop(); - } - await bootstrap.stop(); - }; - - afterEach(() => teardown()); - - afterAll(() => { - for (let i = 0; i < NUM_NODES; i++) { - fs.rmSync(`./data-${i}`, { recursive: true, force: true }); - } - }); - - it('should rollup txs from all peers', async () => { - // create the bootstrap node for the network - if (!bootstrapNodeEnr) { - throw new Error('Bootstrap node ENR is not available'); - } - // create our network of nodes and submit txs into each of them - // the number of txs per node and the number of txs per rollup - // should be set so that the only way for rollups to be built - // is if the txs are successfully gossiped around the nodes. 
- const contexts: NodeContext[] = []; - const nodes: AztecNodeService[] = await createNodes( - config, - PEER_ID_PRIVATE_KEYS, - bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); - - // wait a bit for peers to discover each other - await sleep(4000); - - for (const node of nodes) { - const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); - contexts.push(context); - } - - // now ensure that all txs were successfully mined - await Promise.all( - contexts.flatMap((context, i) => - context.txs.map(async (tx, j) => { - logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); - }), - ), - ); - - // shutdown all nodes. - await stopNodes(bootstrapNode, nodes); - }); - - // NOTE: If this test fails in a PR where the shuffling algorithm is changed, then it is failing as the node with - // the mocked p2p layer is being picked as the sequencer, and it does not have any transactions in it's mempool. - // If this is the case, then we should update the test to switch off the mempool of a different node. - // adjust `nodeToTurnOffTxGossip` in the test below. - it('should produce an attestation by requesting tx data over the p2p network', async () => { - /** - * Birds eye overview of the test - * 1. We spin up x nodes - * 2. We turn off receiving a tx via gossip from two of the nodes - * 3. We send a transactions and gossip it to other nodes - * 4. The disabled nodes will receive an attestation that it does not have the data for - * 5. They will request this data over the p2p layer - * 6. We receive all of the attestations that we need and we produce the block - * - * Note: we do not attempt to let this node produce a block, as it will not have received any transactions - * from the other pxes. - */ - - if (!bootstrapNodeEnr) { - throw new Error('Bootstrap node ENR is not available'); - } - const contexts: NodeContext[] = []; - const nodes: AztecNodeService[] = await createNodes( - config, - PEER_ID_PRIVATE_KEYS, - bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); - - // wait a bit for peers to discover each other - await sleep(4000); - - // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it - // Original implementation of `processTxFromPeer` will store received transactions in the tx pool. - // We have chosen nodes 0,3 as they do not get chosen to be the sequencer in this test. - const nodeToTurnOffTxGossip = [0, 3]; - for (const nodeIndex of nodeToTurnOffTxGossip) { - jest - .spyOn((nodes[nodeIndex] as any).p2pClient.p2pService, 'processTxFromPeer') - .mockImplementation((): Promise => { - return Promise.resolve(); - }); - } - - // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. - // If the shuffling algorithm changes, then this will need to be updated. 
- for (let i = 0; i < 2; i++) { - const context = await createPXEServiceAndSubmitTransactions(nodes[i], NUM_TXS_PER_NODE); - contexts.push(context); - } - - await Promise.all( - contexts.flatMap((context, i) => - context.txs.map(async (tx, j) => { - logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - await tx.wait(); - logger.info(`Tx ${i}-${j}: ${await tx.getTxHash()} has been mined`); - return await tx.getTxHash(); - }), - ), - ); - - await stopNodes(bootstrapNode, nodes); - }); - - it('should re-discover stored peers without bootstrap node', async () => { - const contexts: NodeContext[] = []; - const nodes: AztecNodeService[] = await createNodes( - config, - PEER_ID_PRIVATE_KEYS, - bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); - - // wait a bit for peers to discover each other - await sleep(3000); - - // stop bootstrap node - await bootstrapNode.stop(); - - // create new nodes from datadir - const newNodes: AztecNodeService[] = []; - - // stop all nodes - for (let i = 0; i < NUM_NODES; i++) { - const node = nodes[i]; - await node.stop(); - logger.info(`Node ${i} stopped`); - await sleep(1200); - // TODO: make a restart nodes function - const newNode = await createNode( - config, - PEER_ID_PRIVATE_KEYS[i], - i + 1 + BOOT_NODE_UDP_PORT, - undefined, - i, - `./data-${i}`, - ); - logger.info(`Node ${i} restarted`); - newNodes.push(newNode); - } - - // wait a bit for peers to discover each other - await sleep(2000); - - for (const node of newNodes) { - const context = await createPXEServiceAndSubmitTransactions(node, NUM_TXS_PER_NODE); - contexts.push(context); - } - - // now ensure that all txs were successfully mined - await Promise.all( - contexts.flatMap((context, i) => - context.txs.map(async (tx, j) => { - logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); - }), - ), - ); - - // shutdown all nodes. - await stopNodes(bootstrapNode, newNodes); - }); - - // creates an instance of the PXE and submit a given number of transactions to it. 
- const createPXEServiceAndSubmitTransactions = async ( - node: AztecNodeService, - numTxs: number, - ): Promise => { - const rpcConfig = getRpcConfig(); - const pxeService = await createPXEService(node, rpcConfig, true); - - const secretKey = Fr.random(); - const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); - await pxeService.registerAccount(secretKey, completeAddress.partialAddress); - - const txs = await submitTxsTo(pxeService, numTxs); - return { - txs, - account: completeAddress.address, - pxeService, - node, - }; - }; - - // submits a set of transactions to the provided Private eXecution Environment (PXE) - const submitTxsTo = async (pxe: PXEService, numTxs: number) => { - const txs: SentTx[] = []; - for (let i = 0; i < numTxs; i++) { - // const tx = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()).deploy(); - const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); - const deployMethod = await accountManager.getDeployMethod(); - await deployMethod.create({ - contractAddressSalt: accountManager.salt, - skipClassRegistration: true, - skipPublicDeployment: true, - universalDeploy: true, - }); - await deployMethod.prove({}); - const tx = deployMethod.send(); - - const txHash = await tx.getTxHash(); - - logger.info(`Tx sent with hash ${txHash}`); - const receipt = await tx.getReceipt(); - expect(receipt).toEqual( - expect.objectContaining({ - status: TxStatus.PENDING, - error: '', - }), - ); - logger.info(`Receipt received for ${txHash}`); - txs.push(tx); - } - return txs; - }; -}); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 560677f6bee..ab370d186d3 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -11,6 +11,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { generatePrivateKey } from 'viem/accounts'; import { getPrivateKeyFromIndex } from './utils.js'; +import { getRandomPort } from '@aztec/foundation/testing'; export interface NodeContext { node: AztecNodeService; @@ -19,11 +20,24 @@ export interface NodeContext { account: AztecAddress; } +export function generateNodePrivateKeys(startIndex: number, numberOfNodes: number): `0x${string}`[] { + const nodePrivateKeys: `0x${string}`[] = []; + // Do not start from 0 as it is used during setup + for (let i = startIndex; i < startIndex + numberOfNodes; i++) { + nodePrivateKeys.push(`0x${getPrivateKeyFromIndex(i)!.toString('hex')}`); + } + return nodePrivateKeys; +} + +export function generatePeerIdPrivateKey(): string { + // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ + return '08021220' + generatePrivateKey().substr(2, 66); +} + export function generatePeerIdPrivateKeys(numberOfPeers: number): string[] { const peerIdPrivateKeys = []; for (let i = 0; i < numberOfPeers; i++) { - // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ - peerIdPrivateKeys.push('08021220' + generatePrivateKey().substr(2, 66)); + peerIdPrivateKeys.push(generatePeerIdPrivateKey()); } return peerIdPrivateKeys; } @@ -33,40 +47,53 @@ export async function createNodes( peerIdPrivateKeys: string[], bootstrapNodeEnr: string, numNodes: number, - bootNodePort: number, + bootNodePort?: number, ): Promise { - const nodes = []; + const nodePromises = []; for (let i = 0; i < numNodes; i++) { - const node = await createNode(config, 
peerIdPrivateKeys[i], i + 1 + bootNodePort, bootstrapNodeEnr, i); - nodes.push(node); + // TODO(md): explain + const port = bootNodePort ? bootNodePort + i + 1 : await getRandomPort() || 40401 + i; + + const nodePromise = createNode(config, peerIdPrivateKeys[i], port, bootstrapNodeEnr, i); + nodePromises.push(nodePromise); } - return nodes; + return Promise.all(nodePromises); } // creates a P2P enabled instance of Aztec Node Service export async function createNode( config: AztecNodeConfig, peerIdPrivateKey: string, - tcpListenPort: number, + tcpPort: number, bootstrapNode: string | undefined, publisherAddressIndex: number, dataDirectory?: string, ) { - // We use different L1 publisher accounts in order to avoid duplicate tx nonces. We start from - // publisherAddressIndex + 1 because index 0 was already used during test environment setup. - const publisherPrivKey = getPrivateKeyFromIndex(publisherAddressIndex + 1); - config.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; + const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, peerIdPrivateKey, publisherAddressIndex, dataDirectory); + return await AztecNodeService.createAndSync( + validatorConfig, + new NoopTelemetryClient(), + createDebugLogger(`aztec:node-${tcpPort}`), + ); +} + +export async function createValidatorConfig(config: AztecNodeConfig, bootstrapNodeEnr?: string, port?: number, peerIdPrivateKey?: string, accountIndex: number = 0, dataDirectory?: string) { + peerIdPrivateKey = peerIdPrivateKey ?? generatePeerIdPrivateKey(); + port = port ?? await getRandomPort()!; - const validatorPrivKey = getPrivateKeyFromIndex(1 + publisherAddressIndex); - config.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; + const privateKey = getPrivateKeyFromIndex(accountIndex); + const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; - const newConfig: AztecNodeConfig = { + config.publisherPrivateKey = privateKeyHex; + config.validatorPrivateKey = privateKeyHex; + + const nodeConfig: AztecNodeConfig = { ...config, peerIdPrivateKey: peerIdPrivateKey, - udpListenAddress: `0.0.0.0:${tcpListenPort}`, - tcpListenAddress: `0.0.0.0:${tcpListenPort}`, - tcpAnnounceAddress: `127.0.0.1:${tcpListenPort}`, - udpAnnounceAddress: `127.0.0.1:${tcpListenPort}`, + udpListenAddress: `0.0.0.0:${port}`, + tcpListenAddress: `0.0.0.0:${port}`, + tcpAnnounceAddress: `127.0.0.1:${port}`, + udpAnnounceAddress: `127.0.0.1:${port}`, minTxsPerBlock: config.minTxsPerBlock, maxTxsPerBlock: config.maxTxsPerBlock, p2pEnabled: true, @@ -74,13 +101,10 @@ export async function createNode( l2QueueSize: 1, transactionProtocol: '', dataDirectory, - bootstrapNodes: bootstrapNode ? [bootstrapNode] : [], + bootstrapNodes: bootstrapNodeEnr ? 
[bootstrapNodeEnr] : [], }; - return await AztecNodeService.createAndSync( - newConfig, - new NoopTelemetryClient(), - createDebugLogger(`aztec:node-${tcpListenPort}`), - ); + + return nodeConfig; } export async function createBootstrapNode(port: number) { diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 2155c660e7a..9c6aa28317b 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -40,7 +40,8 @@ import { MNEMONIC } from './fixtures.js'; import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { setupL1Contracts } from './setup_l1_contracts.js'; -import { deployCanonicalAuthRegistry, deployCanonicalRouter, getPrivateKeyFromIndex } from './utils.js'; +import { deployCanonicalAuthRegistry, deployCanonicalRouter, getPrivateKeyFromIndex, SetupOptions, startAnvil } from './utils.js'; +import { config } from 'process'; export type SubsystemsContext = { anvil: Anvil; @@ -64,8 +65,11 @@ type SnapshotEntry = { export function createSnapshotManager( testName: string, dataPath?: string, - config: Partial = {}, - deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER }, + config: Partial = {}, + deployL1ContractsArgs: Partial = { + assumeProvenThrough: Number.MAX_SAFE_INTEGER, + initialValidators: [] + }, ) { return dataPath ? new SnapshotManager(testName, dataPath, config, deployL1ContractsArgs) @@ -139,7 +143,7 @@ class SnapshotManager implements ISnapshotManager { constructor( testName: string, private dataPath: string, - private config: Partial = {}, + private config: Partial = {}, private deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER }, ) { this.livePath = join(this.dataPath, 'live', testName); @@ -285,7 +289,7 @@ export async function createAndSyncProverNode( async function setupFromFresh( statePath: string | undefined, logger: Logger, - config: Partial = {}, + opts: SetupOptions = {}, deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER, }, @@ -294,22 +298,14 @@ async function setupFromFresh( // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. - const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...config }; + const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...opts }; aztecNodeConfig.dataDirectory = statePath; // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. logger.verbose('Starting anvil...'); - const anvil = await retry( - async () => { - const ethereumHostPort = await getPort(); - aztecNodeConfig.l1RpcUrl = `http://127.0.0.1:${ethereumHostPort}`; - const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); - await anvil.start(); - return anvil; - }, - 'Start anvil', - makeBackoff([5, 5, 5]), - ); + const res = await startAnvil(opts.l1BlockTime); + const anvil = res.anvil; + aztecNodeConfig.l1RpcUrl = res.rpcUrl; // Deploy our L1 contracts. 
logger.verbose('Deploying L1 contracts...'); @@ -323,11 +319,21 @@ async function setupFromFresh( aztecNodeConfig.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; aztecNodeConfig.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; + const ethCheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); + + if (opts.l1StartTime) { + await ethCheatCodes.warp(opts.l1StartTime); + } + const deployL1ContractsValues = await setupL1Contracts( aztecNodeConfig.l1RpcUrl, hdAccount, logger, - deployL1ContractsArgs, + { + salt: opts.salt, + initialValidators: opts.initialValidators, + ...deployL1ContractsArgs, + } ); aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; aztecNodeConfig.l1PublishRetryIntervalMS = 100; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 7d456587d34..2b55ee5ab66 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -284,7 +284,7 @@ async function setupWithRemoteEnvironment( } /** Options for the e2e tests setup */ -type SetupOptions = { +export type SetupOptions = { /** State load */ stateLoad?: string; /** Previously deployed contracts on L1 */ From c55bb14187bdf3cd591fb3fe54de8553a2a055ad Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Mon, 30 Sep 2024 22:34:57 +0000 Subject: [PATCH 061/123] fmt --- .../src/e2e_p2p/gossip_network.test.ts | 15 +-- .../end-to-end/src/e2e_p2p/p2p_network.ts | 79 ++++++++----- .../src/e2e_p2p/rediscovery.test.ts | 18 +-- .../end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 17 +-- yarn-project/end-to-end/src/e2e_p2p/shared.ts | 109 +++++++++--------- .../end-to-end/src/fixtures/setup_p2p_test.ts | 26 ++++- .../src/fixtures/snapshot_manager.ts | 27 +++-- 7 files changed, 150 insertions(+), 141 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index af95e133972..a91557f28ef 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -1,23 +1,16 @@ - import { type AztecNodeService } from '@aztec/aztec-node'; -import { - sleep, -} from '@aztec/aztec.js'; +import { sleep } from '@aztec/aztec.js'; import fs from 'fs'; -import { - type NodeContext, - createNodes, -} from '../fixtures/setup_p2p_test.js'; +import { type NodeContext, createNodes } from '../fixtures/setup_p2p_test.js'; +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; -import { P2PNetworkTest } from './p2p_network.js'; -import { createPXEServiceAndSubmitTransactions } from './shared.js'; - describe('e2e_p2p_network', () => { let t: P2PNetworkTest; diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index e3c35881be2..3ebdbb806d8 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,15 +1,22 @@ -import { getContract } from "viem"; -import { createSnapshotManager, ISnapshotManager, SubsystemsContext } from "../fixtures/snapshot_manager.js"; -import { RollupAbi } from "@aztec/l1-artifacts"; -import { BootstrapNode } from "@aztec/p2p"; -import { 
createDebugLogger, DebugLogger } from "@aztec/foundation/log"; -import { createBootstrapNode, createValidatorConfig, generateNodePrivateKeys, generatePeerIdPrivateKeys } from "../fixtures/setup_p2p_test.js"; -import { AztecNodeConfig, AztecNodeService } from "@aztec/aztec-node"; -import { EthCheatCodes } from "@aztec/aztec.js"; -import { EthAddress, ETHEREUM_SLOT_DURATION } from "@aztec/circuits.js"; -import { privateKeyToAccount } from "viem/accounts"; -import { getPrivateKeyFromIndex } from "../fixtures/utils.js"; -import { getRandomPort } from "@aztec/foundation/testing"; +import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; +import { EthCheatCodes } from '@aztec/aztec.js'; +import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { getRandomPort } from '@aztec/foundation/testing'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import { type BootstrapNode } from '@aztec/p2p'; + +import { getContract } from 'viem'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { + createBootstrapNode, + createValidatorConfig, + generateNodePrivateKeys, + generatePeerIdPrivateKeys, +} from '../fixtures/setup_p2p_test.js'; +import { type ISnapshotManager, type SubsystemsContext, createSnapshotManager } from '../fixtures/snapshot_manager.js'; +import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; const BOOT_NODE_UDP_PORT = 404000; @@ -26,7 +33,14 @@ export class P2PNetworkTest { public bootstrapNodeEnr: string = ''; - constructor(testName: string, public bootstrapNode: BootstrapNode, public bootNodePort: number, private numberOfNodes: number, initialValidatorAddress: string, initialValidatorConfig: AztecNodeConfig) { + constructor( + testName: string, + public bootstrapNode: BootstrapNode, + public bootNodePort: number, + private numberOfNodes: number, + initialValidatorAddress: string, + initialValidatorConfig: AztecNodeConfig, + ) { this.logger = createDebugLogger(`aztec:e2e_p2p:${testName}`); this.baseAccount = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); @@ -46,19 +60,25 @@ export class P2PNetworkTest { } static async create(testName: string, numberOfNodes: number) { - const port = await getRandomPort() || BOOT_NODE_UDP_PORT; + const port = (await getRandomPort()) || BOOT_NODE_UDP_PORT; const bootstrapNode = await createBootstrapNode(port); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - const validatorPrivateKey = getPrivateKeyFromIndex(0)!; const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); const intiailValidatorAddress = privateKeyToAccount(initialValidatorConfig.publisherPrivateKey).address; - return new P2PNetworkTest(testName, bootstrapNode, port, numberOfNodes, intiailValidatorAddress, initialValidatorConfig); + return new P2PNetworkTest( + testName, + bootstrapNode, + port, + numberOfNodes, + intiailValidatorAddress, + initialValidatorConfig, + ); } async applyBaseSnapshots() { - await this.snapshotManager.snapshot("add-validators", async ({deployL1ContractsValues, aztecNodeConfig}) => { + await this.snapshotManager.snapshot('add-validators', async ({ deployL1ContractsValues, aztecNodeConfig }) => { const rollup = getContract({ address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), abi: RollupAbi, @@ -77,18 +97,21 @@ export class P2PNetworkTest { const slotsInEpoch = await rollup.read.EPOCH_DURATION(); const timestamp = 
await rollup.read.getTimestampForSlot([slotsInEpoch]); const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - this.logger.debug('Warp failed, time already satisfied'); - } + try { + await cheatCodes.warp(Number(timestamp)); + } catch (err) { + this.logger.debug('Warp failed, time already satisfied'); + } - // Send and await a tx to make sure we mine a block for the warp to correctly progress. + // Send and await a tx to make sure we mine a block for the warp to correctly progress. await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: this.baseAccount.address, value: 1n, account: this.baseAccount }), + hash: await deployL1ContractsValues.walletClient.sendTransaction({ + to: this.baseAccount.address, + value: 1n, + account: this.baseAccount, + }), }); - }) - + }); } async setup() { @@ -109,6 +132,4 @@ export class P2PNetworkTest { async teardown() { await this.snapshotManager.teardown(); } - - -} \ No newline at end of file +} diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts index 37e02d97507..f5e82747a5a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -1,25 +1,17 @@ import { type AztecNodeService } from '@aztec/aztec-node'; -import { - sleep, -} from '@aztec/aztec.js'; -import { type BootstrapNode } from '@aztec/p2p'; +import { sleep } from '@aztec/aztec.js'; import fs from 'fs'; -import { - type NodeContext, - createNode, - createNodes, -} from '../fixtures/setup_p2p_test.js'; +import { type NodeContext, createNode, createNodes } from '../fixtures/setup_p2p_test.js'; +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; const BOOT_NODE_UDP_PORT = 40400; -import { P2PNetworkTest } from './p2p_network.js'; -import { createPXEServiceAndSubmitTransactions } from './shared.js'; - describe('e2e_p2p_rediscovery', () => { let t: P2PNetworkTest; @@ -37,7 +29,6 @@ describe('e2e_p2p_rediscovery', () => { } }); - it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; const nodes: AztecNodeService[] = await createNodes( @@ -97,5 +88,4 @@ describe('e2e_p2p_rediscovery', () => { // shutdown all nodes. 
await t.stopNodes(newNodes); }); - }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts index c79d3162d55..cc031baa725 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -1,24 +1,17 @@ import { type AztecNodeService } from '@aztec/aztec-node'; -import { - sleep, -} from '@aztec/aztec.js'; - -import fs from 'fs'; +import { sleep } from '@aztec/aztec.js'; import { jest } from '@jest/globals'; +import fs from 'fs'; -import { - type NodeContext, - createNodes, -} from '../fixtures/setup_p2p_test.js'; +import { type NodeContext, createNodes } from '../fixtures/setup_p2p_test.js'; +import { P2PNetworkTest } from './p2p_network.js'; +import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; -import { P2PNetworkTest } from './p2p_network.js'; -import { createPXEServiceAndSubmitTransactions } from './shared.js'; - describe('e2e_p2p_reqresp_tx', () => { let t: P2PNetworkTest; diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index 481a5716344..d7d4f3bac3f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -1,62 +1,61 @@ -import { AztecNodeService } from "@aztec/aztec-node"; -import { NodeContext } from "../fixtures/setup_p2p_test.js"; -import { getSchnorrAccount } from "@aztec/accounts/schnorr"; -import { DebugLogger, SentTx } from "@aztec/aztec.js"; -import { TxStatus } from "@aztec/circuit-types"; -import { CompleteAddress, GrumpkinScalar } from "@aztec/circuits.js"; -import { Fr } from "@aztec/foundation/fields"; -import { createPXEService, PXEService ,getPXEServiceConfig as getRpcConfig } from "@aztec/pxe"; +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { type AztecNodeService } from '@aztec/aztec-node'; +import { type DebugLogger, type SentTx } from '@aztec/aztec.js'; +import { CompleteAddress, TxStatus } from '@aztec/aztec.js'; +import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; +import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; - // creates an instance of the PXE and submit a given number of transactions to it. - export const createPXEServiceAndSubmitTransactions = async ( - logger: DebugLogger, - node: AztecNodeService, - numTxs: number, - ): Promise => { - const rpcConfig = getRpcConfig(); - const pxeService = await createPXEService(node, rpcConfig, true); +import { type NodeContext } from '../fixtures/setup_p2p_test.js'; - const secretKey = Fr.random(); - const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); - await pxeService.registerAccount(secretKey, completeAddress.partialAddress); +// creates an instance of the PXE and submit a given number of transactions to it. 
+export const createPXEServiceAndSubmitTransactions = async ( + logger: DebugLogger, + node: AztecNodeService, + numTxs: number, +): Promise => { + const rpcConfig = getRpcConfig(); + const pxeService = await createPXEService(node, rpcConfig, true); - const txs = await submitTxsTo(logger, pxeService, numTxs); - return { - txs, - account: completeAddress.address, - pxeService, - node, - }; + const secretKey = Fr.random(); + const completeAddress = CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, Fr.random()); + await pxeService.registerAccount(secretKey, completeAddress.partialAddress); + + const txs = await submitTxsTo(logger, pxeService, numTxs); + return { + txs, + account: completeAddress.address, + pxeService, + node, }; +}; - // submits a set of transactions to the provided Private eXecution Environment (PXE) - const submitTxsTo = async (logger: DebugLogger, pxe: PXEService, numTxs: number) => { - const txs: SentTx[] = []; - for (let i = 0; i < numTxs; i++) { - // const tx = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()).deploy(); - const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); - const deployMethod = await accountManager.getDeployMethod(); - await deployMethod.create({ - contractAddressSalt: accountManager.salt, - skipClassRegistration: true, - skipPublicDeployment: true, - universalDeploy: true, - }); - await deployMethod.prove({}); - const tx = deployMethod.send(); +// submits a set of transactions to the provided Private eXecution Environment (PXE) +const submitTxsTo = async (logger: DebugLogger, pxe: PXEService, numTxs: number) => { + const txs: SentTx[] = []; + for (let i = 0; i < numTxs; i++) { + const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); + const deployMethod = await accountManager.getDeployMethod(); + await deployMethod.create({ + contractAddressSalt: accountManager.salt, + skipClassRegistration: true, + skipPublicDeployment: true, + universalDeploy: true, + }); + await deployMethod.prove({}); + const tx = deployMethod.send(); - const txHash = await tx.getTxHash(); + const txHash = await tx.getTxHash(); - logger.info(`Tx sent with hash ${txHash}`); - const receipt = await tx.getReceipt(); - expect(receipt).toEqual( - expect.objectContaining({ - status: TxStatus.PENDING, - error: '', - }), - ); - logger.info(`Receipt received for ${txHash}`); - txs.push(tx); - } - return txs; - }; \ No newline at end of file + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; +}; diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index ab370d186d3..6739355b2b7 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -4,6 +4,7 @@ import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, createDebugLogger } from '@aztec/aztec.js'; import { type AztecAddress } from '@aztec/circuits.js'; +import { getRandomPort } from '@aztec/foundation/testing'; import { type BootnodeConfig, BootstrapNode, createLibP2PPeerId } from '@aztec/p2p'; import { type PXEService } from '@aztec/pxe'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -11,7 
+12,6 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { generatePrivateKey } from 'viem/accounts'; import { getPrivateKeyFromIndex } from './utils.js'; -import { getRandomPort } from '@aztec/foundation/testing'; export interface NodeContext { node: AztecNodeService; @@ -30,7 +30,7 @@ export function generateNodePrivateKeys(startIndex: number, numberOfNodes: numbe } export function generatePeerIdPrivateKey(): string { - // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ + // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ return '08021220' + generatePrivateKey().substr(2, 66); } @@ -52,7 +52,7 @@ export async function createNodes( const nodePromises = []; for (let i = 0; i < numNodes; i++) { // TODO(md): explain - const port = bootNodePort ? bootNodePort + i + 1 : await getRandomPort() || 40401 + i; + const port = bootNodePort ? bootNodePort + i + 1 : (await getRandomPort()) || 40401 + i; const nodePromise = createNode(config, peerIdPrivateKeys[i], port, bootstrapNodeEnr, i); nodePromises.push(nodePromise); @@ -69,7 +69,14 @@ export async function createNode( publisherAddressIndex: number, dataDirectory?: string, ) { - const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, peerIdPrivateKey, publisherAddressIndex, dataDirectory); + const validatorConfig = await createValidatorConfig( + config, + bootstrapNode, + tcpPort, + peerIdPrivateKey, + publisherAddressIndex, + dataDirectory, + ); return await AztecNodeService.createAndSync( validatorConfig, new NoopTelemetryClient(), @@ -77,9 +84,16 @@ export async function createNode( ); } -export async function createValidatorConfig(config: AztecNodeConfig, bootstrapNodeEnr?: string, port?: number, peerIdPrivateKey?: string, accountIndex: number = 0, dataDirectory?: string) { +export async function createValidatorConfig( + config: AztecNodeConfig, + bootstrapNodeEnr?: string, + port?: number, + peerIdPrivateKey?: string, + accountIndex: number = 0, + dataDirectory?: string, +) { peerIdPrivateKey = peerIdPrivateKey ?? generatePeerIdPrivateKey(); - port = port ?? await getRandomPort()!; + port = port ?? 
(await getRandomPort()!); const privateKey = getPrivateKeyFromIndex(accountIndex); const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 9c6aa28317b..3f37ae8bea9 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -21,7 +21,6 @@ import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint'; import { type DeployL1ContractsArgs, createL1Clients } from '@aztec/ethereum'; import { asyncMap } from '@aztec/foundation/async-map'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; -import { makeBackoff, retry } from '@aztec/foundation/retry'; import { resolver, reviver } from '@aztec/foundation/serialize'; import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; @@ -40,8 +39,13 @@ import { MNEMONIC } from './fixtures.js'; import { getACVMConfig } from './get_acvm_config.js'; import { getBBConfig } from './get_bb_config.js'; import { setupL1Contracts } from './setup_l1_contracts.js'; -import { deployCanonicalAuthRegistry, deployCanonicalRouter, getPrivateKeyFromIndex, SetupOptions, startAnvil } from './utils.js'; -import { config } from 'process'; +import { + type SetupOptions, + deployCanonicalAuthRegistry, + deployCanonicalRouter, + getPrivateKeyFromIndex, + startAnvil, +} from './utils.js'; export type SubsystemsContext = { anvil: Anvil; @@ -68,7 +72,7 @@ export function createSnapshotManager( config: Partial = {}, deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER, - initialValidators: [] + initialValidators: [], }, ) { return dataPath @@ -325,16 +329,11 @@ async function setupFromFresh( await ethCheatCodes.warp(opts.l1StartTime); } - const deployL1ContractsValues = await setupL1Contracts( - aztecNodeConfig.l1RpcUrl, - hdAccount, - logger, - { - salt: opts.salt, - initialValidators: opts.initialValidators, - ...deployL1ContractsArgs, - } - ); + const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.l1RpcUrl, hdAccount, logger, { + salt: opts.salt, + initialValidators: opts.initialValidators, + ...deployL1ContractsArgs, + }); aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; aztecNodeConfig.l1PublishRetryIntervalMS = 100; From d72ee9587b27df47c1517d5046d9fcc4dc598dc4 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 11:56:33 +0000 Subject: [PATCH 062/123] fix --- yarn-project/end-to-end/Earthfile | 2 +- .../src/e2e_p2p/gossip_network.test.ts | 20 +++++++++++----- .../end-to-end/src/e2e_p2p/p2p_network.ts | 12 +++++----- .../src/e2e_p2p/rediscovery.test.ts | 16 +++++++------ .../end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 23 +++++++++++++------ .../end-to-end/src/fixtures/setup_p2p_test.ts | 12 ++++++---- .../p2p/src/service/libp2p_service.ts | 4 ++-- .../p2p/src/service/reqresp/reqresp.ts | 14 +++++++---- 8 files changed, 64 insertions(+), 39 deletions(-) diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index c349b153efb..fe2be42c453 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -2,7 +2,7 @@ VERSION 0.8 e2e-p2p: LOCALLY - RUN ./scripts/e2e_test.sh ./src/e2e_p2p_network.test.ts + RUN "./scripts/e2e_test.sh 
./src/e2e_p2p/ --runInBand" e2e-l1-with-wall-time: LOCALLY diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index a91557f28ef..8c70f8d8582 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -10,21 +10,23 @@ import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; +const BOOT_NODE_UDP_PORT = 40600; + +const DATA_DIR = './data/gossip'; describe('e2e_p2p_network', () => { let t: P2PNetworkTest; - beforeEach(async () => { - t = await P2PNetworkTest.create('e2e_p2p_network', NUM_NODES); + beforeAll(async () => { + t = await P2PNetworkTest.create('e2e_p2p_network', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); - afterEach(async () => await t.teardown()); - - afterAll(() => { + afterEach(async () => { + await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { - fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); } }); @@ -38,21 +40,26 @@ describe('e2e_p2p_network', () => { // should be set so that the only way for rollups to be built // is if the txs are successfully gossiped around the nodes. const contexts: NodeContext[] = []; + t.logger.info('Creating nodes'); const nodes: AztecNodeService[] = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, NUM_NODES, + BOOT_NODE_UDP_PORT, + DATA_DIR, ); // wait a bit for peers to discover each other await sleep(4000); + t.logger.info('Submitting transactions'); for (const node of nodes) { const context = await createPXEServiceAndSubmitTransactions(t.logger, node, NUM_TXS_PER_NODE); contexts.push(context); } + t.logger.info('Waiting for transactions to be mined'); // now ensure that all txs were successfully mined await Promise.all( contexts.flatMap((context, i) => @@ -62,6 +69,7 @@ describe('e2e_p2p_network', () => { }), ), ); + t.logger.info('All transactions mined'); // shutdown all nodes. 
await t.stopNodes(nodes); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 3ebdbb806d8..9d2c4fd3ebf 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -2,7 +2,6 @@ import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { EthCheatCodes } from '@aztec/aztec.js'; import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { getRandomPort } from '@aztec/foundation/testing'; import { RollupAbi } from '@aztec/l1-artifacts'; import { type BootstrapNode } from '@aztec/p2p'; @@ -17,10 +16,8 @@ import { } from '../fixtures/setup_p2p_test.js'; import { type ISnapshotManager, type SubsystemsContext, createSnapshotManager } from '../fixtures/snapshot_manager.js'; import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import getPort from 'get-port'; -const BOOT_NODE_UDP_PORT = 404000; - -// TODO: use this eventually export class P2PNetworkTest { private snapshotManager: ISnapshotManager; private baseAccount; @@ -43,6 +40,7 @@ export class P2PNetworkTest { ) { this.logger = createDebugLogger(`aztec:e2e_p2p:${testName}`); + // Set up the base account and node private keys for the initial network deployment this.baseAccount = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); this.nodePrivateKeys = generateNodePrivateKeys(1, numberOfNodes); this.peerIdPrivateKeys = generatePeerIdPrivateKeys(numberOfNodes); @@ -59,8 +57,8 @@ export class P2PNetworkTest { }); } - static async create(testName: string, numberOfNodes: number) { - const port = (await getRandomPort()) || BOOT_NODE_UDP_PORT; + static async create(testName: string, numberOfNodes: number, basePort?: number) { + const port = basePort || await getPort(); const bootstrapNode = await createBootstrapNode(port); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); @@ -123,10 +121,12 @@ export class P2PNetworkTest { } async stopNodes(nodes: AztecNodeService[]) { + this.logger.info('Stopping nodes'); for (const node of nodes) { await node.stop(); } await this.bootstrapNode.stop(); + this.logger.info('Nodes stopped'); } async teardown() { diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts index f5e82747a5a..e2ffbb3739d 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -12,20 +12,21 @@ const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; const BOOT_NODE_UDP_PORT = 40400; +const DATA_DIR = './data/rediscovery'; + describe('e2e_p2p_rediscovery', () => { let t: P2PNetworkTest; - beforeEach(async () => { - t = await P2PNetworkTest.create('e2e_p2p_rediscovery', NUM_NODES); + beforeAll(async () => { + t = await P2PNetworkTest.create('e2e_p2p_rediscovery', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); - afterEach(async () => await t.teardown()); - - afterAll(() => { + afterAll(async () => { + await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { - fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); } }); @@ -37,6 +38,7 @@ describe('e2e_p2p_rediscovery', () => { t.bootstrapNodeEnr, NUM_NODES, BOOT_NODE_UDP_PORT, + DATA_DIR, ); // wait a bit for peers to discover each other @@ -61,7 +63,7 @@ 
describe('e2e_p2p_rediscovery', () => { i + 1 + BOOT_NODE_UDP_PORT, undefined, i, - `./data-${i}`, + `${DATA_DIR}-${i}`, ); t.logger.info(`Node ${i} restarted`); newNodes.push(newNode); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts index cc031baa725..3e62a2fed71 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -11,21 +11,23 @@ import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 2; +const BOOT_NODE_UDP_PORT = 40800; + +const DATA_DIR = './data/data-reqresp'; describe('e2e_p2p_reqresp_tx', () => { let t: P2PNetworkTest; - beforeEach(async () => { - t = await P2PNetworkTest.create('e2e_p2p_reqresp_tx', NUM_NODES); + beforeAll(async () => { + t = await P2PNetworkTest.create('e2e_p2p_reqresp_tx', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); - afterEach(async () => await t.teardown()); - - afterAll(() => { + afterAll(async () => { + await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { - fs.rmSync(`./data-${i}`, { recursive: true, force: true }); + fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); } }); @@ -51,16 +53,21 @@ describe('e2e_p2p_reqresp_tx', () => { throw new Error('Bootstrap node ENR is not available'); } const contexts: NodeContext[] = []; + + t.logger.info('Creating nodes'); const nodes: AztecNodeService[] = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, NUM_NODES, + BOOT_NODE_UDP_PORT, + DATA_DIR, ); // wait a bit for peers to discover each other await sleep(4000); + t.logger.info('Turning off tx gossip'); // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it // Original implementation of `processTxFromPeer` will store received transactions in the tx pool. // We have chosen nodes 0,3 as they do not get chosen to be the sequencer in this test. @@ -73,6 +80,7 @@ describe('e2e_p2p_reqresp_tx', () => { }); } + t.logger.info('Submitting transactions'); // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. // If the shuffling algorithm changes, then this will need to be updated. 
for (let i = 0; i < 2; i++) { @@ -80,6 +88,7 @@ describe('e2e_p2p_reqresp_tx', () => { contexts.push(context); } + t.logger.info('Waiting for transactions to be mined'); await Promise.all( contexts.flatMap((context, i) => context.txs.map(async (tx, j) => { @@ -90,7 +99,7 @@ describe('e2e_p2p_reqresp_tx', () => { }), ), ); - + t.logger.info('All transactions mined'); await t.stopNodes(nodes); }); }); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 6739355b2b7..0512a92a6be 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -4,7 +4,6 @@ import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, createDebugLogger } from '@aztec/aztec.js'; import { type AztecAddress } from '@aztec/circuits.js'; -import { getRandomPort } from '@aztec/foundation/testing'; import { type BootnodeConfig, BootstrapNode, createLibP2PPeerId } from '@aztec/p2p'; import { type PXEService } from '@aztec/pxe'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -12,6 +11,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { generatePrivateKey } from 'viem/accounts'; import { getPrivateKeyFromIndex } from './utils.js'; +import getPort from 'get-port'; export interface NodeContext { node: AztecNodeService; @@ -48,13 +48,15 @@ export async function createNodes( bootstrapNodeEnr: string, numNodes: number, bootNodePort?: number, + dataDirectory?: string, ): Promise { const nodePromises = []; for (let i = 0; i < numNodes; i++) { - // TODO(md): explain - const port = bootNodePort ? bootNodePort + i + 1 : (await getRandomPort()) || 40401 + i; + // We run on ports from the bootnode upwards if a port if provided, otherwise we get a random port + const port = bootNodePort ? bootNodePort + i + 1 : await getPort(); - const nodePromise = createNode(config, peerIdPrivateKeys[i], port, bootstrapNodeEnr, i); + const dataDir = dataDirectory ? `${dataDirectory}-${i}` : undefined; + const nodePromise = createNode(config, peerIdPrivateKeys[i], port, bootstrapNodeEnr, i, dataDir); nodePromises.push(nodePromise); } return Promise.all(nodePromises); @@ -93,7 +95,7 @@ export async function createValidatorConfig( dataDirectory?: string, ) { peerIdPrivateKey = peerIdPrivateKey ?? generatePeerIdPrivateKey(); - port = port ?? (await getRandomPort()!); + port = port ?? 
await getPort(); const privateKey = getPrivateKeyFromIndex(accountIndex); const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index 2c6778eb8fb..755439b5f2e 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -182,12 +182,12 @@ export class LibP2PService implements P2PService { await this.discoveryRunningPromise?.stop(); this.logger.debug('Stopping peer discovery service...'); await this.peerDiscoveryService.stop(); + this.logger.debug('Request response service stopped...'); + await this.reqresp.stop(); this.logger.debug('Stopping LibP2P...'); await this.stopLibP2P(); this.logger.info('LibP2P service stopped'); this.logger.debug('Stopping request response service...'); - await this.reqresp.stop(); - this.logger.debug('Request response service stopped...'); } /** diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index 41a9fd97d1e..a2249015c2f 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -36,8 +36,6 @@ import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; export class ReqResp { protected readonly logger: Logger; - private abortController: AbortController = new AbortController(); - private overallRequestTimeoutMs: number; private individualRequestTimeoutMs: number; @@ -78,9 +76,16 @@ export class ReqResp { for (const protocol of Object.keys(this.subProtocolHandlers)) { await this.libp2p.unhandle(protocol); } + + // Close all active connections + const closeStreamPromises = this.libp2p.getConnections().map(connection => connection.close()); + await Promise.all(closeStreamPromises); + this.logger.debug('ReqResp: All active streams closed'); + this.rateLimiter.stop(); - await this.libp2p.stop(); - this.abortController.abort(); + this.logger.debug('ReqResp: Rate limiter stopped'); + + // NOTE: We assume libp2p instance is managed by the caller } /** @@ -187,7 +192,6 @@ export class ReqResp { let stream: Stream | undefined; try { stream = await this.libp2p.dialProtocol(peerId, subProtocol); - this.logger.debug(`Stream opened with ${peerId.toString()} for ${subProtocol}`); // Open the stream with a timeout From 334262322858fc717dbcbf27532041cae4766372 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 11:57:26 +0000 Subject: [PATCH 063/123] fmt --- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 4 ++-- yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 9d2c4fd3ebf..88d11fe1aad 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -5,6 +5,7 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; import { type BootstrapNode } from '@aztec/p2p'; +import getPort from 'get-port'; import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; @@ -16,7 +17,6 @@ import { } from '../fixtures/setup_p2p_test.js'; import { type ISnapshotManager, type SubsystemsContext, createSnapshotManager } from '../fixtures/snapshot_manager.js'; import { 
getPrivateKeyFromIndex } from '../fixtures/utils.js'; -import getPort from 'get-port'; export class P2PNetworkTest { private snapshotManager: ISnapshotManager; @@ -58,7 +58,7 @@ export class P2PNetworkTest { } static async create(testName: string, numberOfNodes: number, basePort?: number) { - const port = basePort || await getPort(); + const port = basePort || (await getPort()); const bootstrapNode = await createBootstrapNode(port); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 0512a92a6be..b3a9050e341 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -8,10 +8,10 @@ import { type BootnodeConfig, BootstrapNode, createLibP2PPeerId } from '@aztec/p import { type PXEService } from '@aztec/pxe'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import getPort from 'get-port'; import { generatePrivateKey } from 'viem/accounts'; import { getPrivateKeyFromIndex } from './utils.js'; -import getPort from 'get-port'; export interface NodeContext { node: AztecNodeService; @@ -95,7 +95,7 @@ export async function createValidatorConfig( dataDirectory?: string, ) { peerIdPrivateKey = peerIdPrivateKey ?? generatePeerIdPrivateKey(); - port = port ?? await getPort(); + port = port ?? (await getPort()); const privateKey = getPrivateKeyFromIndex(accountIndex); const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; From 871a3194460910148ec895f509e81431087c6452 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 12:07:30 +0000 Subject: [PATCH 064/123] fix --- yarn-project/end-to-end/Earthfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index fe2be42c453..5a9acce49d8 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -2,7 +2,7 @@ VERSION 0.8 e2e-p2p: LOCALLY - RUN "./scripts/e2e_test.sh ./src/e2e_p2p/ --runInBand" + RUN ./scripts/e2e_test.sh "./src/e2e_p2p/ --runInBand" e2e-l1-with-wall-time: LOCALLY From 20730dcb5657e1c3660a4518dc0c42beabacf4a9 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 12:07:55 +0000 Subject: [PATCH 065/123] fix --- yarn-project/end-to-end/Earthfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index fe2be42c453..5a9acce49d8 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -2,7 +2,7 @@ VERSION 0.8 e2e-p2p: LOCALLY - RUN "./scripts/e2e_test.sh ./src/e2e_p2p/ --runInBand" + RUN ./scripts/e2e_test.sh "./src/e2e_p2p/ --runInBand" e2e-l1-with-wall-time: LOCALLY From 481537ea1a88925a204e206f020ff0d6324e3f9a Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 13:14:52 +0000 Subject: [PATCH 066/123] fix: enable additional params in e2e tests --- yarn-project/end-to-end/Earthfile | 2 +- yarn-project/end-to-end/scripts/e2e_test.sh | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 5a9acce49d8..a4a7680a770 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -2,7 +2,7 @@ VERSION 0.8 
 e2e-p2p:
   LOCALLY
-  RUN ./scripts/e2e_test.sh "./src/e2e_p2p/ --runInBand"
+  RUN ./scripts/e2e_test.sh ./src/e2e_p2p/ --runInBand
 e2e-l1-with-wall-time:
   LOCALLY
diff --git a/yarn-project/end-to-end/scripts/e2e_test.sh b/yarn-project/end-to-end/scripts/e2e_test.sh
index 5faec984d16..2284fa188f2 100755
--- a/yarn-project/end-to-end/scripts/e2e_test.sh
+++ b/yarn-project/end-to-end/scripts/e2e_test.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
-# Usage: ./e2e_test.sh
+# Usage: ./e2e_test.sh <...extra_args>
 # Optional environment variables:
 # HARDWARE_CONCURRENCY (default: "")
@@ -8,6 +8,9 @@ set -eu
 # Main positional parameter
 TEST="$1"
+shift
+EXTRA_ARGS="$@"
+
 # Default values for environment variables
 HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}"
 AZTEC_DOCKER_TAG=$(git rev-parse HEAD)
@@ -17,4 +20,4 @@ if ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotoco
   exit 1
 fi
-docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST"
+docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST" "$EXTRA_ARGS"

From f4ab1bd16db850e79859caf0565f1cb5186a227f Mon Sep 17 00:00:00 2001
From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com>
Date: Tue, 1 Oct 2024 13:33:49 +0000
Subject: [PATCH 067/123] fix: e2e_test script

---
 yarn-project/end-to-end/scripts/e2e_test.sh | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/yarn-project/end-to-end/scripts/e2e_test.sh b/yarn-project/end-to-end/scripts/e2e_test.sh
index 2284fa188f2..981a449a390 100755
--- a/yarn-project/end-to-end/scripts/e2e_test.sh
+++ b/yarn-project/end-to-end/scripts/e2e_test.sh
@@ -9,7 +9,6 @@ set -eu
 # Main positional parameter
 TEST="$1"
 shift
-EXTRA_ARGS="$@"
 # Default values for environment variables
 HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}"
 AZTEC_DOCKER_TAG=$(git rev-parse HEAD)
@@ -19,4 +19,4 @@ if ! 
docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotoco exit 1 fi -docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST" "$EXTRA_ARGS" +docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST" $@ From a6ca6c7dd85b2f02a09c8975f8d9304ae52d448f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:20:24 +0000 Subject: [PATCH 068/123] chore: use new snapshotting infra --- .../end-to-end/src/e2e_p2p/p2p_network.ts | 35 ++- .../end-to-end/src/e2e_p2p/reex.test.ts | 78 +++++ yarn-project/end-to-end/src/e2e_p2p/shared.ts | 30 ++ .../end-to-end/src/e2e_p2p_reex.test.ts | 283 ------------------ 4 files changed, 141 insertions(+), 285 deletions(-) create mode 100644 yarn-project/end-to-end/src/e2e_p2p/reex.test.ts delete mode 100644 yarn-project/end-to-end/src/e2e_p2p_reex.test.ts diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 88d11fe1aad..3cc171e46be 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,5 +1,5 @@ import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { EthCheatCodes } from '@aztec/aztec.js'; +import { AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; @@ -15,8 +15,10 @@ import { generateNodePrivateKeys, generatePeerIdPrivateKeys, } from '../fixtures/setup_p2p_test.js'; -import { type ISnapshotManager, type SubsystemsContext, createSnapshotManager } from '../fixtures/snapshot_manager.js'; +import { type ISnapshotManager, type SubsystemsContext, addAccounts, createSnapshotManager } from '../fixtures/snapshot_manager.js'; import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { SpamContract } from '@aztec/noir-contracts.js'; +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; export class P2PNetworkTest { private snapshotManager: ISnapshotManager; @@ -30,6 +32,10 @@ export class P2PNetworkTest { public bootstrapNodeEnr: string = ''; + // The re-execution test needs a wallet and a spam contract + public wallet?: AccountWalletWithSecretKey; + public spamContract?: SpamContract; + constructor( testName: string, public bootstrapNode: BootstrapNode, @@ -112,6 +118,31 @@ export class P2PNetworkTest { }); } + async setupAccount() { + await this.snapshotManager.snapshot("setup-account", addAccounts(1, this.logger, false), async ({accountKeys}, ctx) => { + const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); + await Promise.all(accountManagers.map(a => a.register())); + const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + this.wallet = wallets[0]; + }); + } + + async deploySpamContract() { + await this.snapshotManager.snapshot("add-spam-contract", async () => { + if (!this.wallet) { + throw new Error('Call snapshot t.setupAccount before deploying account contract'); + } + + const spamContract = await SpamContract.deploy(this.wallet).send().deployed(); + return {contractAddress: spamContract.address}; + }, async ({contractAddress}) => { + if (!this.wallet) { + throw new Error('Call snapshot t.setupAccount before deploying 
account contract'); + } + this.spamContract = await SpamContract.at(contractAddress, this.wallet); + }); + } + async setup() { this.ctx = await this.snapshotManager.setup(); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts new file mode 100644 index 00000000000..2dc1b4f9b7a --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -0,0 +1,78 @@ + +import { type AztecNodeService } from '@aztec/aztec-node'; +import { sleep } from '@aztec/aztec.js'; + +import fs from 'fs'; + +import { createNodes } from '../fixtures/setup_p2p_test.js'; +import { P2PNetworkTest } from './p2p_network.js'; +import { submitComplexTxsTo } from './shared.js'; +import { afterEach, beforeAll, describe, it } from '@jest/globals'; + +// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds +const NUM_NODES = 4; +const NUM_TXS_PER_NODE = 4; +const NUM_TXS_PER_BLOCK = 4; +const BOOT_NODE_UDP_PORT = 41000; + +const DATA_DIR = './data/re-ex'; + +describe('e2e_p2p_reex', () => { + let t: P2PNetworkTest; + + beforeAll(async () => { + t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT); + await t.applyBaseSnapshots(); + await t.setupAccount(); + await t.deploySpamContract(); + await t.setup(); + }); + + afterEach(async () => { + await t.teardown(); + for (let i = 0; i < NUM_NODES; i++) { + fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); + } + }); + + it('validators should re-execute transactions before attesting', async () => { + // create the bootstrap node for the network + if (!t.bootstrapNodeEnr) { + throw new Error('Bootstrap node ENR is not available'); + } + + t.ctx.aztecNodeConfig.validatorReEx = true; + + const nodes: AztecNodeService[] = await createNodes( + t.ctx.aztecNodeConfig, + t.peerIdPrivateKeys, + t.bootstrapNodeEnr, + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); + + // wait a bit for peers to discover each other + await sleep(4000); + + // tODO: use a tx with nested calls + const txs = await submitComplexTxsTo(t.logger, t.spamContract!, NUM_TXS_PER_NODE); + nodes.forEach(node => { + node.getSequencer()?.updateSequencerConfig( { + minTxsPerBlock: NUM_TXS_PER_BLOCK, + maxTxsPerBlock: NUM_TXS_PER_BLOCK, + }); + }); + + // now ensure that all txs were successfully mined + await Promise.all( + txs.map(async (tx, i) => { + t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ); + + // shutdown all nodes. 
+ await t.stopNodes(nodes); + }); +}); + diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index d7d4f3bac3f..8f37853ccb3 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -6,6 +6,36 @@ import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; import { type NodeContext } from '../fixtures/setup_p2p_test.js'; +import { expect } from '@jest/globals'; +import { SpamContract } from '@aztec/noir-contracts.js'; + + // submits a set of transactions to the provided Private eXecution Environment (PXE) + export const submitComplexTxsTo = async (logger: DebugLogger, spamContract: SpamContract, numTxs: number) => { + const txs: SentTx[] = []; + + const seed = 1234n + const spamCount = 15 + for (let i = 0; i < numTxs; i++) { + // TODO: check out batch call for deployments + + // Send a public mint tx - this will be minted from the token contract to the pxe account + // const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() + const tx = spamContract.methods.spam(seed + BigInt(i * spamCount), spamCount, false, true ).send() + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; +}; // creates an instance of the PXE and submit a given number of transactions to it. export const createPXEServiceAndSubmitTransactions = async ( diff --git a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts deleted file mode 100644 index 02765198386..00000000000 --- a/yarn-project/end-to-end/src/e2e_p2p_reex.test.ts +++ /dev/null @@ -1,283 +0,0 @@ -import { getSchnorrAccount } from '@aztec/accounts/schnorr'; -import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { - AccountWalletWithSecretKey, - CompleteAddress, - type DebugLogger, - type DeployL1Contracts, - EthCheatCodes, - Fr, - GrumpkinScalar, - type SentTx, - TxStatus, - sleep, -} from '@aztec/aztec.js'; -import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; -import { RollupAbi } from '@aztec/l1-artifacts'; -import { type BootstrapNode } from '@aztec/p2p'; -import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; - -import { jest } from '@jest/globals'; -import fs from 'fs'; -import { getContract } from 'viem'; -import { privateKeyToAccount } from 'viem/accounts'; -import { SpamContract, TokenContract } from '@aztec/noir-contracts.js'; - -import { - createBootstrapNode, - createNodes, - generatePeerIdPrivateKey, - generatePeerIdPrivateKeys, -} from './fixtures/setup_p2p_test.js'; -import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; -import { createAccounts } from '@aztec/accounts/testing'; -import { createDebugLogger } from '@aztec/foundation/log'; -import { addAccounts, createSnapshotManager, ISnapshotManager, SubsystemsContext } from './fixtures/snapshot_manager.js'; -import { getRandomPort } from '@aztec/foundation/testing'; - -// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 2; -const 
NUM_TXS_PER_BLOCK = 4; -const BOOT_NODE_UDP_PORT = 40400; - -const PEER_ID_PRIVATE_KEYS = generatePeerIdPrivateKeys(NUM_NODES); - -// const { E2E_DATA_PATH: dataPath } = process.env; -const dataPath = "./data-e2e_p2p_reex"; - -class ReExTest { - private snapshotManager: ISnapshotManager; - private logger: DebugLogger; - private accounts: CompleteAddress[] = []; - - constructor(testName: string) { - this.logger = createDebugLogger(`aztec:e2e_p2p_reex:${testName}`); - this.snapshotManager = createSnapshotManager(`e2e_p2p_reex/${testName}`, dataPath); - } - - async applyBaseSnapshot() { - await this.snapshotManager.snapshot("register validators", async () => { - - - - } ) - } -} - -// TODO: really need to setup a snapshot for this -describe('e2e_p2p_network', () => { - let ctx: SubsystemsContext; - let logger: DebugLogger; - let teardown: () => Promise; - let bootstrapNode: BootstrapNode; - let bootstrapNodeEnr: string; - let wallet: AccountWalletWithSecretKey; - - let snapshotManager: ISnapshotManager; - - let spam: SpamContract; - - beforeEach(async () => { - logger = createDebugLogger('aztec:e2e_p2p_reex'); - - bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); - bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - - // Put the original node on the bootstrap list so that it transactions we send to it are gossiped to other nodes - const port = await getRandomPort() || BOOT_NODE_UDP_PORT - 1; - const peerIdPrivateKey = generatePeerIdPrivateKey(); - const bootstrapConfig = { - p2pEnabled: true, - bootstrapNodes: [bootstrapNodeEnr], - peerIdPrivateKey, - udpListenAddress: `0.0.0.0:${port}`, - tcpListenAddress: `0.0.0.0:${port}`, - tcpAnnounceAddress: `127.0.0.1:${port}`, - udpAnnounceAddress: `127.0.0.1:${port}`, - }; - - const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); - const initialValidators = [EthAddress.fromString(account.address)]; - // If we give the initial node a bootstrap node, then we can attempt to connect through it - - snapshotManager = createSnapshotManager(`e2e_p2p_reex`, dataPath, bootstrapConfig, { - assumeProvenThrough: undefined, - initialValidators, - }); - - await snapshotManager.snapshot("setup-account", addAccounts(1, logger, false), async ({accountKeys}, ctx) => { - const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); - await Promise.all(accountManagers.map(a => a.register())); - const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); - wallet = wallets[0]; - }); - - await snapshotManager.snapshot("add-spam-contract", async () => { - const spamContract = await SpamContract.deploy(wallet).send().deployed(); - return {contractAddress: spamContract.address}; - }, async ({contractAddress}) => { - spam = await SpamContract.at(contractAddress, wallet); - }); - - // await snapshotManager.snapshot( - // "Add Nodes as validators", - // async ({deployL1ContractsValues}) => { - // const rollup = getContract({ - // address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - // abi: RollupAbi, - // client: deployL1ContractsValues.walletClient, - // }); - - // As a snapshot gets sent before hand, our account nonce may have increased. 
- // const nonce = await deployL1ContractsValues.publicClient.getTransactionCount({ address: }); - // const senderAddress = deployL1ContractsValues.walletClient.account.address; - // const nonce = await deployL1ContractsValues.publicClient.getTransactionCount({ address: senderAddress }); - // for (let i = 0; i < NUM_NODES; i++) { - // const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); - // // await rollup.write.addValidator([account.address], { nonce: nonce + i }); - // // console.log("nonce", nonce + i); - // await rollup.write.addValidator([account.address]); - // } - // } - // No need to do anything after setup is complete - // ) - - ctx = await snapshotManager.setup(); - - // TODO: could i setup this timejump? - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. - const rollup = getContract({ - address: ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: ctx.deployL1ContractsValues.walletClient, - }); - - // TODO(md): we want this also in a snapshot but i am having nonce issues - for (let i = 0; i < NUM_NODES; i++) { - const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); - // await rollup.write.addValidator([account.address], { nonce: nonce + i }); - // console.log("nonce", nonce + i); - await rollup.write.addValidator([account.address]); - } - - - ctx.aztecNodeConfig.minTxsPerBlock = 1; - ctx.aztecNodeConfig.maxTxsPerBlock = NUM_TXS_PER_BLOCK; // - - const slotsInEpoch = await rollup.read.EPOCH_DURATION(); - const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(ctx.aztecNodeConfig.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - logger.debug('Warp failed, time already satisfied'); - } - - // Send and await a tx to make sure we mine a block for the warp to correctly progress. - await ctx.deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await ctx.deployL1ContractsValues.walletClient.sendTransaction({ to: account.address, value: 1n, account }), - }); - - - }); - - const stopNodes = async (bootstrap: BootstrapNode, nodes: AztecNodeService[]) => { - for (const node of nodes) { - await node.stop(); - } - await bootstrap.stop(); - }; - - afterEach(() => teardown()); - - afterAll(() => { - for (let i = 0; i < NUM_NODES; i++) { - fs.rmSync(`./data-${i}`, { recursive: true, force: true }); - } - }); - - it.only('should rollup txs from all peers with re-execution', async () => { - // create the bootstrap node for the network - if (!bootstrapNodeEnr) { - throw new Error('Bootstrap node ENR is not available'); - } - - ctx.aztecNodeConfig.validatorReEx = true; - - // create our network of nodes and submit txs into each of them - // the number of txs per node and the number of txs per rollup - // should be set so that the only way for rollups to be built - // is if the txs are successfully gossiped around the nodes. 
- - const nodes: AztecNodeService[] = await createNodes( - ctx.aztecNodeConfig, - PEER_ID_PRIVATE_KEYS, - bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); - - - console.log("writing nodes ", nodes.length); - for (const node of nodes) { - const enr = await node.getEncodedEnr(); - console.log("node enr", enr?.toString()); - } - - - // spam = await SpamContract.deploy(wallet).send().deployed(); - - // wait a bit for peers to discover each other - await sleep(4000); - - // tODO: use a tx with nested calls - const txs = await submitTxsTo(NUM_TXS_PER_BLOCK); - nodes.forEach(node => { - node.getSequencer()?.updateSequencerConfig( { - minTxsPerBlock: NUM_TXS_PER_BLOCK, - maxTxsPerBlock: NUM_TXS_PER_BLOCK, - }); - }); - - // now ensure that all txs were successfully mined - await Promise.all( - txs.map(async (tx, i) => { - logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); - }), - ); - - // shutdown all nodes. - await stopNodes(bootstrapNode, nodes); - }); - - // submits a set of transactions to the provided Private eXecution Environment (PXE) - const submitTxsTo = async (numTxs: number) => { - const txs: SentTx[] = []; - - const seed = 1234n - const spamCount = 15 - for (let i = 0; i < numTxs; i++) { - // TODO: check out batch call for deployments - - // Send a public mint tx - this will be minted from the token contract to the pxe account - // const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() - const tx = spam.methods.spam(seed + BigInt(i * spamCount), spamCount, false, true ).send() - const txHash = await tx.getTxHash(); - - logger.info(`Tx sent with hash ${txHash}`); - const receipt = await tx.getReceipt(); - expect(receipt).toEqual( - expect.objectContaining({ - status: TxStatus.PENDING, - error: '', - }), - ); - logger.info(`Receipt received for ${txHash}`); - txs.push(tx); - } - return txs; - }; -}); From af95b82e21a5c39dec397c77ceebbcf2f00cf876 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 1 Oct 2024 17:01:57 +0000 Subject: [PATCH 069/123] fix: wait on validator deploymnets --- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 88d11fe1aad..2be61a85cbd 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -83,11 +83,21 @@ export class P2PNetworkTest { client: deployL1ContractsValues.walletClient, }); + const txHashes: `0x${string}`[] = []; for (let i = 0; i < this.numberOfNodes; i++) { const account = privateKeyToAccount(this.nodePrivateKeys[i]!); - await rollup.write.addValidator([account.address]); + const txHash = await rollup.write.addValidator([account.address]); + txHashes.push(txHash); this.logger.debug(`Adding ${account.address} as validator`); } + // Wait for all the transactions adding validators to be mined + await Promise.all( + txHashes.map(txHash => + deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: txHash, + }), + ), + ); //@note Now we jump ahead to the next epoch such that the validator committee is picked // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! 
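[Editorial note] The hunk above addresses a race where the epoch warp could run before the addValidator transactions were mined, leaving the committee empty. Below is a minimal, self-contained sketch of that wait pattern, not the repository's implementation: the helper name and the structural parameter types are assumptions for illustration, while rollup.write.addValidator and publicClient.waitForTransactionReceipt mirror the calls used in the patch itself.

// Sketch only: register each validator, collect the tx hashes, and block until every
// registration is mined before anything downstream relies on the validator set.
type Hex = `0x${string}`;

async function addValidatorsAndWaitForReceipts(
  rollup: { write: { addValidator: (args: [Hex]) => Promise<Hex> } },
  publicClient: { waitForTransactionReceipt: (args: { hash: Hex }) => Promise<unknown> },
  validatorAddresses: Hex[],
): Promise<void> {
  const txHashes: Hex[] = [];
  for (const address of validatorAddresses) {
    // Each write returns the L1 tx hash as soon as it is submitted; mining happens later.
    txHashes.push(await rollup.write.addValidator([address]));
  }
  // Only return once every registration is included, so a subsequent epoch jump
  // sees a populated validator set rather than an empty committee.
  await Promise.all(txHashes.map(hash => publicClient.waitForTransactionReceipt({ hash })));
}

Submitting the writes sequentially keeps the nonces of a single deployer account ordered, while the receipt waits can safely run in parallel.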
From 500ab6f6611ebdac0698d03827df0d37ff3605a3 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 11:13:47 +0000 Subject: [PATCH 070/123] temp --- .../src/structs/public_data_update_request.ts | 3 +- .../end-to-end/src/e2e_p2p/p2p_network.ts | 28 +++++++-- .../end-to-end/src/e2e_p2p/reex.test.ts | 57 ++++++++++++++++--- .../end-to-end/src/fixtures/setup_p2p_test.ts | 34 +++++++++-- .../src/fixtures/snapshot_manager.ts | 1 - .../orchestrator/block-building-helpers.ts | 2 + .../src/sequencer/sequencer.ts | 2 +- .../src/public/light_public_processor.ts | 15 ++++- 8 files changed, 117 insertions(+), 25 deletions(-) diff --git a/yarn-project/circuits.js/src/structs/public_data_update_request.ts b/yarn-project/circuits.js/src/structs/public_data_update_request.ts index 79bc9546950..383ad813ca1 100644 --- a/yarn-project/circuits.js/src/structs/public_data_update_request.ts +++ b/yarn-project/circuits.js/src/structs/public_data_update_request.ts @@ -6,6 +6,7 @@ import { inspect } from 'util'; import { GeneratorIndex } from '../constants.gen.js'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { ContractStorageUpdateRequest } from './contract_storage_update_request.js'; +import { computePublicDataTreeLeafSlot } from '../hash/hash.js'; /** * Write operations on the public data tree including the previous value. @@ -79,7 +80,7 @@ export class PublicDataUpdateRequest { } static fromContractStorageUpdateRequest(contractAddress: AztecAddress, updateRequest: ContractStorageUpdateRequest) { - const leafSlot = poseidon2HashWithSeparator([contractAddress, updateRequest.storageSlot], GeneratorIndex.PUBLIC_LEAF_INDEX); + const leafSlot = computePublicDataTreeLeafSlot(contractAddress, updateRequest.storageSlot); return new PublicDataUpdateRequest(leafSlot, updateRequest.newValue, updateRequest.counter); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 3cc171e46be..194cd9bc80a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -3,7 +3,7 @@ import { AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; -import { type BootstrapNode } from '@aztec/p2p'; +import { BootnodeConfig, createLibP2PPeerId, type BootstrapNode } from '@aztec/p2p'; import getPort from 'get-port'; import { getContract } from 'viem'; @@ -11,6 +11,10 @@ import { privateKeyToAccount } from 'viem/accounts'; import { createBootstrapNode, + createBootstrapNodeConfig, + createBootstrapNodeConfigFromPrivateKey, + createBootstrapNodeFromConfig, + createBootstrapNodeFromPrivateKey, createValidatorConfig, generateNodePrivateKeys, generatePeerIdPrivateKeys, @@ -63,9 +67,11 @@ export class P2PNetworkTest { }); } - static async create(testName: string, numberOfNodes: number, basePort?: number) { + static async create(testName: string, numberOfNodes: number, basePort?: number, baseBootnodePrivateKey?: Uint8Array) { const port = basePort || (await getPort()); - const bootstrapNode = await createBootstrapNode(port); + const bootnodePrivateKey = baseBootnodePrivateKey || ( await createLibP2PPeerId()).privateKey!; + + const bootstrapNode = await createBootstrapNodeFromPrivateKey(bootnodePrivateKey, port); const bootstrapNodeEnr = 
bootstrapNode.getENR().encodeTxt(); const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); @@ -73,6 +79,7 @@ export class P2PNetworkTest { return new P2PNetworkTest( testName, + // TODO: clean up all of this bootnode stuff bootstrapNode, port, numberOfNodes, @@ -89,11 +96,21 @@ export class P2PNetworkTest { client: deployL1ContractsValues.walletClient, }); + this.logger.verbose(`Adding ${this.numberOfNodes} validators`); + + const txHashes: `0x${string}`[] = []; for (let i = 0; i < this.numberOfNodes; i++) { const account = privateKeyToAccount(this.nodePrivateKeys[i]!); - await rollup.write.addValidator([account.address]); - this.logger.debug(`Adding ${account.address} as validator`); + const txHash = await rollup.write.addValidator([account.address]); + txHashes.push(txHash); + + this.logger.verbose(`Adding ${account.address} as validator`); + console.log("Added validator", account.address); } + await Promise.all(txHashes.map(txHash => deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: txHash + }))); + //@note Now we jump ahead to the next epoch such that the validator committee is picked // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! @@ -115,6 +132,7 @@ export class P2PNetworkTest { account: this.baseAccount, }), }); + }); } diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 2dc1b4f9b7a..12d9fa1c8f3 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -8,27 +8,44 @@ import { createNodes } from '../fixtures/setup_p2p_test.js'; import { P2PNetworkTest } from './p2p_network.js'; import { submitComplexTxsTo } from './shared.js'; import { afterEach, beforeAll, describe, it } from '@jest/globals'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import { getContract } from 'viem'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 4; -const NUM_TXS_PER_NODE = 4; -const NUM_TXS_PER_BLOCK = 4; +const NUM_NODES = 2; +const NUM_TXS_PER_NODE = 2; const BOOT_NODE_UDP_PORT = 41000; const DATA_DIR = './data/re-ex'; describe('e2e_p2p_reex', () => { let t: P2PNetworkTest; + // We hardcode a bootstrap node private key, such that the key recovered from node config ( the node that has deployed the contract ) + // Can find the bootstrap node and connect to the other peers we create + // This is a unique requirement for this test as we store a contract that points to a node we cache in setup. 
+ // - we just need this cached node to know where to find the bootstrap node once it comes online + const BOOTSTRAP_NODE_PRIVATE_KEY: Uint8Array = new Uint8Array([ + 8, 2, 18, 32, 173, 104, 140, 243, + 166, 150, 169, 63, 45, 233, 242, 106, + 59, 18, 136, 215, 18, 148, 98, 2, + 219, 17, 61, 127, 100, 160, 122, 188, + 255, 52, 17, 196 + ]); beforeAll(async () => { - t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT); - await t.applyBaseSnapshots(); + + t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT, BOOTSTRAP_NODE_PRIVATE_KEY); + t.logger.verbose('Setup account'); await t.setupAccount(); + t.logger.verbose('Deploy spam contract'); await t.deploySpamContract(); + t.logger.verbose('Apply base snapshots'); + await t.applyBaseSnapshots(); + t.logger.verbose('Setup nodes'); await t.setup(); }); - afterEach(async () => { + afterAll(async () => { await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); @@ -41,6 +58,28 @@ describe('e2e_p2p_reex', () => { throw new Error('Bootstrap node ENR is not available'); } + // TODO(md): make this part of a snapshot + + + // Check the validators in the l1 contract + // TODO: this block is temp + const rollup = getContract({ + address: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: t.ctx.deployL1ContractsValues.publicClient, + }); + const validators = await rollup.read.getValidators(); + const committee = await rollup.read.getCurrentEpochCommittee(); + const slot = await rollup.read.getCurrentSlot(); + const validatorCount = await rollup.read.getValidatorCount(); + console.log("at the beginning") + console.log('Validators length:', validators.length); + console.log('Committee length:', committee.length); + console.log('Slot:', slot); + console.log('Validator count:', validatorCount); + + t.logger.verbose(`Validators: ${validators.map(v => v.toString()).join(', ')}`); + t.ctx.aztecNodeConfig.validatorReEx = true; const nodes: AztecNodeService[] = await createNodes( @@ -55,13 +94,13 @@ describe('e2e_p2p_reex', () => { await sleep(4000); // tODO: use a tx with nested calls - const txs = await submitComplexTxsTo(t.logger, t.spamContract!, NUM_TXS_PER_NODE); nodes.forEach(node => { node.getSequencer()?.updateSequencerConfig( { - minTxsPerBlock: NUM_TXS_PER_BLOCK, - maxTxsPerBlock: NUM_TXS_PER_BLOCK, + minTxsPerBlock: NUM_TXS_PER_NODE, + maxTxsPerBlock: NUM_TXS_PER_NODE, }); }); + const txs = await submitComplexTxsTo(t.logger, t.spamContract!, NUM_TXS_PER_NODE); // now ensure that all txs were successfully mined await Promise.all( diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 3166521e70b..2fc404271d6 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -126,17 +126,41 @@ export async function createValidatorConfig( return nodeConfig; } -export async function createBootstrapNode(port: number) { - const peerId = await createLibP2PPeerId(); - const bootstrapNode = new BootstrapNode(); - const config: BootnodeConfig = { +export function createBootstrapNodeConfigFromPrivateKey(privateKey: Uint8Array, port: number): BootnodeConfig{ + return { udpListenAddress: `0.0.0.0:${port}`, udpAnnounceAddress: `127.0.0.1:${port}`, - peerIdPrivateKey: Buffer.from(peerId.privateKey!).toString('hex'), + peerIdPrivateKey: 
Buffer.from(privateKey).toString('hex'), minPeerCount: 10, maxPeerCount: 100, }; +} + +export async function createBootstrapNodeConfig(port: number): Promise { + const peerId = await createLibP2PPeerId(); + return createBootstrapNodeConfigFromPrivateKey(peerId.privateKey!, port); +} + +export async function createBootstrapNodeFromConfig(config: BootnodeConfig) { + const bootstrapNode = new BootstrapNode(); await bootstrapNode.start(config); return bootstrapNode; } + +export async function createBootstrapNode(port: number) { + const bootstrapNode = new BootstrapNode(); + const config = await createBootstrapNodeConfig(port); + await bootstrapNode.start(config); + + return bootstrapNode; +} + +export async function createBootstrapNodeFromPrivateKey(privateKey: Uint8Array, port: number): Promise { + const bootstrapNode = new BootstrapNode(); + const config = createBootstrapNodeConfigFromPrivateKey(privateKey, port); + await bootstrapNode.start(config); + + return bootstrapNode; +} + diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index b7db9c695c7..a624077a588 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -420,7 +420,6 @@ async function setupFromFresh( async function setupFromState(statePath: string, logger: Logger): Promise { logger.verbose(`Initializing with saved state at ${statePath}...`); - // Load config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig = JSON.parse( readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'), diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index e674fc25e2f..3b7c6b83b3f 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -427,6 +427,8 @@ export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: Merkl ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); + console.log("allPublicDataWrites", allPublicDataWrites); + const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 51465e6463d..7d6fd4f15b5 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -519,7 +519,7 @@ export class Sequencer { this.log.debug(`Attesting committee length ${committee.length}`); if (committee.length === 0) { - this.log.debug(`Attesting committee length is 0, skipping`); + this.log.verbose(`Attesting committee length is 0, skipping`); return undefined; } diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index 62ac23fef69..fea6d23c3c4 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -137,10 +137,16 @@ export class LightPublicProcessor { const allNullifiers = padArrayEnd([...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], 
Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()); const allNoteHashes = padArrayEnd([...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], Fr.ZERO, MAX_NOTE_HASHES_PER_TX); - const allPublicDataUpdateRequests = padArrayEnd([...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites], PublicDataUpdateRequest.empty(), MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX); + const allPublicDataUpdateRequests = [...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites]; + const uniquePublicDataUpdateRequests = allPublicDataUpdateRequests.filter((leaf, index, self) => + index === self.findIndex((t) => t.leafSlot.equals(leaf.leafSlot)) + ); + const paddedUniquePublicDataUpdateRequests = padArrayEnd(uniquePublicDataUpdateRequests, PublicDataUpdateRequest.empty(), MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX); + + // TODO: refactor this - const allPublicDataWrites = allPublicDataUpdateRequests.map( + const allPublicDataWrites = publicDataWrites.map( ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); @@ -303,6 +309,7 @@ export class LightPublicProcessor { enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); enqueuedCallNewNoteHashes.push(...getNonEmptyItems(res.noteHashes).map(n => n.scope(res.executionRequest.contractAddress))); const tempPub = getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req)); + // console.log("tempPub", tempPub); enqueuedCallPublicDataWrites.push(...tempPub); // TODO: do for the nested executions @@ -330,7 +337,9 @@ export class LightPublicProcessor { // TODO: WHY to we need to do this? 
yucky yucky, reversal doesnt feel quite right const newNullifiers = txCallNewNullifiers.reverse().flat(); const newNoteHashes = txCallNewNoteHashes.reverse().flat(); - const newPublicDataWrites = txCallPublicDataWrites.reverse().flat(); + const newPublicDataWrites = txCallPublicDataWrites.flat(); + + console.log("newPublicDataWrites", newPublicDataWrites); const returning = { nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), From 50e3ba7d199aa27873f2eceeea072fc0b1fdc4c0 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 16:11:25 +0000 Subject: [PATCH 071/123] fix: reex with nested calls --- .../end-to-end/src/e2e_p2p/reex.test.ts | 4 +- .../orchestrator/block-building-helpers.ts | 2 - .../src/public/light_public_processor.ts | 314 ++++++++++-------- 3 files changed, 178 insertions(+), 142 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 12d9fa1c8f3..2b6369b94a3 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -12,8 +12,8 @@ import { RollupAbi } from '@aztec/l1-artifacts'; import { getContract } from 'viem'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds -const NUM_NODES = 2; -const NUM_TXS_PER_NODE = 2; +const NUM_NODES = 4; +const NUM_TXS_PER_NODE = 1; const BOOT_NODE_UDP_PORT = 41000; const DATA_DIR = './data/re-ex'; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 3b7c6b83b3f..e674fc25e2f 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -427,8 +427,6 @@ export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: Merkl ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), ); - console.log("allPublicDataWrites", allPublicDataWrites); - const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index fea6d23c3c4..8ff8190b9ef 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -21,22 +21,18 @@ export class LightPublicProcessorFactory { private telemetryClient: TelemetryClient ) {} - // TODO: using the same interface as the orther public processor factory - // But can probably do alot better here in debt cleanup public async createWithSyncedState( targetBlockNumber: number, maybeHistoricalHeader: Header | undefined, globalVariables: GlobalVariables, txValidator: TxValidator ) { - // TODO: should this be here? - // TODO: is this safe? // Make sure the world state synchronizer is synced await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); + // We will sync again whenever the block is created + // This could be an inefficiency const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); - - // TODO(md): we need to first make sure that we are synced const historicalHeader = maybeHistoricalHeader ?? 
merkleTrees.getInitialHeader(); const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); @@ -51,15 +47,25 @@ export class InvalidTransactionsFound extends Error { } } +type NewStateUpdates = { + nullifiers: Fr[]; + noteHashes: Fr[]; + publicDataWrites: PublicDataUpdateRequest[]; +} + /** * A variant of the public processor that does not run the kernel circuits + * + * TODO(make issues): + * - Gas accounting is not complete + * - Calculating the state root (archive) is not complete */ export class LightPublicProcessor { public publicExecutor: PublicExecutor; - // State + // TODO(md): not used private blockGasLeft: Gas; private pendingNullifiers: Nullifier[]; @@ -80,6 +86,10 @@ export class LightPublicProcessor { } + addNullifiers(nullifiers: Nullifier[]) { + this.pendingNullifiers.push(...nullifiers); + } + // NOTE: does not have the archive public async getTreeSnapshots() { return Promise.all([ @@ -90,76 +100,25 @@ export class LightPublicProcessor { ]); } - // Execution invariants - // If any of the public calls are invalid, then we need to roll them back + /** + * Process a list of transactions + * + * If any of the transactions are invalid, then we throw an error + * @param txs - The transactions to process + */ public async process(txs: Tx[]) { - // TODO(md): Note these will need to padded to a power of 2 + // TODO(md): do we need dummy transactions? txs = txs.map(tx => Tx.clone(tx)); - // TODO: maybe there is some extra validation to make sure that we do not overrun - // the checks that the kernel should have been doing? - // If not we add them to the vm execution - // TODO(md): skiping tx validation for now as validator is not well defined - // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + this.validateTransactions(txs); - // // If any of the transactions are invalid, we throw an error - // if (invalidTxs.length > 0) { - // console.log("invalid txs found"); - // throw new InvalidTransactionsFound(); - // } for (const tx of txs) { if (tx.hasPublicCalls()) { - const publicExecutionResults = await this.executePublicCalls(tx); - // TODO: handle this - if (!publicExecutionResults) { - throw new Error("Public execution results are undefined"); - } - - // TODO: throw if this is not defined - if (tx.data.forPublic) { - // tODO: put this in a function - const {nullifiers, newNoteHashes, publicDataWrites} = publicExecutionResults; - const {nullifiers: nonRevertibleNullifiers, noteHashes: nonRevertibleNoteHashes, publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests} = tx.data.forPublic!.endNonRevertibleData; - const {nullifiers: txNullifiers, noteHashes: txNoteHashes, publicDataUpdateRequests: txPublicDataUpdateRequests} = tx.data.forPublic!.end; - - // tODO: make sure not RE removing empty items - const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); - const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); - const publicNullifiers = getNonEmptyItems(nullifiers); - - const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); - const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); - const publicNoteHashes = getNonEmptyItems(newNoteHashes); - - const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); - const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter(p => !p.isEmpty()); - const nonEmptyPublicDataWrites = 
publicDataWrites.filter(p => !p.isEmpty()); - - const allNullifiers = padArrayEnd([...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()); - const allNoteHashes = padArrayEnd([...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], Fr.ZERO, MAX_NOTE_HASHES_PER_TX); - const allPublicDataUpdateRequests = [...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites]; - const uniquePublicDataUpdateRequests = allPublicDataUpdateRequests.filter((leaf, index, self) => - index === self.findIndex((t) => t.leafSlot.equals(leaf.leafSlot)) - ); - const paddedUniquePublicDataUpdateRequests = padArrayEnd(uniquePublicDataUpdateRequests, PublicDataUpdateRequest.empty(), MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX); - - - - // TODO: refactor this - const allPublicDataWrites = publicDataWrites.map( - ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), - ); - - await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, allNoteHashes); - await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, allNullifiers, NULLIFIER_SUBTREE_HEIGHT); - - const beingWritten = allPublicDataWrites.map(x => x.toBuffer()); - await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, beingWritten, PUBLIC_DATA_SUBTREE_HEIGHT); - - } + await this.executeEnqueuedCallsAndApplyStateUpdates(tx); } else { await this.applyPrivateStateUpdates(tx); + // TODO(md): do i need to do this? // Apply empty public data writes const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => PublicDataTreeLeaf.empty()); await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, emptyPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); @@ -168,27 +127,27 @@ export class LightPublicProcessor { } - // Ok: this is going to be done in the larger light block builder???? - // Theres no point in doing it here - // If the public part fails, then we need to rollback the trees here - // Can the public part use commitments that have just been created? + validateTransactions(txs: Tx[]) { + // TODO: Run tx validator checks + // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + + // TODO(md): check the last valid block number is close + + // If any of the transactions are invalid, we throw an error + // if (invalidTxs.length > 0) { + // console.log("invalid txs found"); + // throw new InvalidTransactionsFound(); + } // There is an assumption based on how the block builder works, that the transactions // provided here CANNOT revert, else they are not added to the block as the kernels // will fail // This will change whenever the vm is changed to be able to revert public async applyPrivateStateUpdates(tx: Tx) { - // TODO(md): do the last block number check??? - - // TODO: read this from forPublic or forRollup??? - // We could probably run this after if the tx reverts - // Does the rollup currently insert reverted transactions?? let insertionPromises: Promise[] = []; if (tx.data.forRollup) { - // TODO: do we insert all or just non empty?? 
const {nullifiers, noteHashes} = tx.data.forRollup.end; if (nullifiers) { - // TODO: note this returns a few things for the circuits to use insertionPromises.push(this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT)); } @@ -200,11 +159,74 @@ export class LightPublicProcessor { await Promise.all(insertionPromises); } + async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { + const publicExecutionResults = await this.executePublicCalls(tx); + // TODO: handle this + if (!publicExecutionResults) { + throw new Error("Public execution results are undefined"); + } + + if (tx.data.forPublic) { + // tODO: put this in a function + const stateUpdates = this.accumulateTransactionAndExecutedStateUpdates(tx, publicExecutionResults); + await this.writeStateUpdates(stateUpdates); + } else { + throw new Error("Transaction does not have public calls"); + } + } + + /** + * Take the state updates from each of the transactions and merge them together + * + * 1. Non Revertible calls come first + * 2. Private updates come second + * 3. Public updates come third + */ + accumulateTransactionAndExecutedStateUpdates(tx: Tx, publicExecutionResults: NewStateUpdates) { + const {nullifiers, noteHashes, publicDataWrites} = publicExecutionResults; + const {nullifiers: nonRevertibleNullifiers, noteHashes: nonRevertibleNoteHashes, publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests} = tx.data.forPublic!.endNonRevertibleData; + const {nullifiers: txNullifiers, noteHashes: txNoteHashes, publicDataUpdateRequests: txPublicDataUpdateRequests} = tx.data.forPublic!.end; + + const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); + const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); + const publicNullifiers = getNonEmptyItems(nullifiers); + + const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); + const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); + const publicNoteHashes = getNonEmptyItems(noteHashes); + + const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); + const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter(p => !p.isEmpty()); + const nonEmptyPublicDataWrites = publicDataWrites.filter(p => !p.isEmpty()); + + const allNullifiers = padArrayEnd([...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], Fr.ZERO, MAX_NULLIFIERS_PER_TX); + const allNoteHashes = padArrayEnd([...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], Fr.ZERO, MAX_NOTE_HASHES_PER_TX); + const allPublicDataUpdateRequests = [...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites]; + + return { + nullifiers: allNullifiers, + noteHashes: allNoteHashes, + publicDataWrites: allPublicDataUpdateRequests + } + } + + async writeStateUpdates(stateUpdates: NewStateUpdates) { + const {nullifiers, noteHashes, publicDataWrites} = stateUpdates; + + // Convert public state toto the tree leaves + const allPublicDataWrites = publicDataWrites.map( + ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), + ); + + await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); + await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT); + await 
this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); + + } + // NOTES: // - gas accounting needs to be done here and - // - maybe make another transaction context that stores the gas for a trnsactions - public async executePublicCalls(tx: Tx) { // Transactions are split up into a number of parts // 1. Non revertible calls - these run with the public kernel setup @@ -219,26 +241,24 @@ export class LightPublicProcessor { const publicCalls = tx.getRevertiblePublicExecutionRequests(); const teardownCall = tx.getPublicTeardownExecutionRequest()!; - // TODO(md): use exceptions to manage differing control flow of reverts - - // TODO: there is some other stuff done in the public processor - // rationalise it and find out where else it can be done + // TODO(md): gas const transactionGas = tx.data.constants.txContext.gasSettings.getLimits(); // Store the successful results for db insertions - // TODO: do we write the new state updates inbetween the calls? + const nonRevertiblePublicExecutionResults: PublicExecutionResult[] = []; const publicExecutionResults: PublicExecutionResult[] = []; - // TODO: Check is trnasaction gas updated where it should? + let publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); + let startSideEffectCounter = publicKernelOutput.endSideEffectCounter + 1; // Execute the non revertible calls - for (const call of nonRevertibleCalls) { - const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); + startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; if (!this.temporaryDidCallOrNestedCallRevert(res)) { this.addNullifiers(res.nullifiers); - publicExecutionResults.push(res); + nonRevertiblePublicExecutionResults.push(res); this.worldStateDB.checkpoint(); } else { await this.worldStateDB.removeNewContracts(tx); @@ -252,9 +272,9 @@ export class LightPublicProcessor { for (const call of publicCalls) { // Execute the non revertible calls - const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); + startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; - // TODO: tidy this up and below if (!this.temporaryDidCallOrNestedCallRevert(res)) { this.addNullifiers(res.nullifiers); publicExecutionResults.push(res); @@ -267,27 +287,22 @@ export class LightPublicProcessor { } if (teardownCall) { - const res = await this.publicExecutor.simulate(teardownCall, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers); + const res = await this.publicExecutor.simulate(teardownCall, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); if (!this.temporaryDidCallOrNestedCallRevert(res)) { this.addNullifiers(res.nullifiers); publicExecutionResults.push(res); } else { - // Similarly, if a teardown call fails, it will revert + // Similarly, if a public calls fail, it will revert // back to the setup state await 
this.worldStateDB.removeNewContracts(tx); await this.worldStateDB.rollbackToCheckpoint(); } } - // TODO(md): think about bringing the journal model into this await this.worldStateDB.commit(); - // On success lower the block gas left -- add a check!!! - this.blockGasLeft = this.blockGasLeft.sub(transactionGas); - - - return this.aggregatePublicExecutionResults(publicExecutionResults); + return this.aggregateResults(nonRevertiblePublicExecutionResults, publicExecutionResults); } // This is just getting the private state updates after executing them @@ -297,36 +312,15 @@ export class LightPublicProcessor { let txCallNewNoteHashes: ScopedNoteHash[][] = []; let txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; - let j = 0; - // TODO: sort these by side effect counter per request - // THE later ones then go first - which does not make sense to me here for (const res of results) { let enqueuedCallNewNullifiers = []; let enqueuedCallNewNoteHashes = []; let enqueuedCallPublicDataWrites = []; - // TODO: I assume that these will need to be ordered by their side effect counter + // Scope the nullifiers, note hashes and public data writes to the contract address enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); enqueuedCallNewNoteHashes.push(...getNonEmptyItems(res.noteHashes).map(n => n.scope(res.executionRequest.contractAddress))); - const tempPub = getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req)); - // console.log("tempPub", tempPub); - enqueuedCallPublicDataWrites.push(...tempPub); - - // TODO: do for the nested executions - let i = 0; - for (const nested of res.nestedExecutions) { - - const newNullifiers = getNonEmptyItems(nested.nullifiers).map(n => n.scope(nested.executionRequest.contractAddress)); - const newNoteHashes = getNonEmptyItems(nested.noteHashes).map(n => n.scope(nested.executionRequest.contractAddress)); - - enqueuedCallNewNullifiers.push(...newNullifiers); - enqueuedCallNewNoteHashes.push(...newNoteHashes); - enqueuedCallPublicDataWrites.push(...getNonEmptyItems(nested.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(nested.executionRequest.contractAddress, req))); - } - - enqueuedCallNewNullifiers = this.sortBySideEffectCounter(enqueuedCallNewNullifiers); - enqueuedCallNewNoteHashes = this.sortBySideEffectCounter(enqueuedCallNewNoteHashes); - enqueuedCallPublicDataWrites = this.sortBySideEffectCounter(enqueuedCallPublicDataWrites); + enqueuedCallPublicDataWrites.push(...getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req))); txCallNewNullifiers.push(enqueuedCallNewNullifiers); txCallNewNoteHashes.push(enqueuedCallNewNoteHashes); @@ -335,38 +329,82 @@ export class LightPublicProcessor { // Reverse // TODO: WHY to we need to do this? 
yucky yucky, reversal doesnt feel quite right - const newNullifiers = txCallNewNullifiers.reverse().flat(); - const newNoteHashes = txCallNewNoteHashes.reverse().flat(); + let newNullifiers = txCallNewNullifiers.reverse().flat(); + let newNoteHashes = txCallNewNoteHashes.reverse().flat(); const newPublicDataWrites = txCallPublicDataWrites.flat(); - console.log("newPublicDataWrites", newPublicDataWrites); + // Squash data writes + let uniquePublicDataWrites = this.removeDuplicatesFromStart(newPublicDataWrites); + uniquePublicDataWrites = this.sortBySideEffectCounter(uniquePublicDataWrites); + + newNullifiers = this.sortBySideEffectCounter(newNullifiers); + newNoteHashes = this.sortBySideEffectCounter(newNoteHashes); const returning = { nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), - publicDataWrites: newPublicDataWrites + publicDataWrites: uniquePublicDataWrites }; return returning; } - // Sort by side effect counter, where the lowest is first - // TODO: refactor - sortBySideEffectCounter(items: T[]) { - return items.sort((a, b) => a.counter - b.counter); + collectNestedExecutions(result: PublicExecutionResult) { + const nestedExecutions: PublicExecutionResult[] = []; + for (const res of result.nestedExecutions) { + nestedExecutions.push(...this.collectNestedExecutions(res)); + } + return [result, ...nestedExecutions]; } - addNullifiers(nullifiers: Nullifier[]) { - this.pendingNullifiers.push(...nullifiers); + + aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { + const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); + + const revertibleNestedExecutions = revertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); + + return { + nullifiers: [...nonRevertible.nullifiers, ...revertible.nullifiers], + noteHashes: [...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], + publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites] + } } - async checkpointOrRollback(reverted: boolean): Promise { - if (reverted) { - await this.worldStateDB.rollbackToCheckpoint(); - } else { - await this.worldStateDB.checkpoint(); + /** + * Sort by side effect counter, where the lowest is first + * @param items + * @returns + */ + sortBySideEffectCounter(items: T[]) { + return items.sort((a, b) => a.counter - b.counter); + } + + /** + * Remove duplicates keeping the oldest item, where duplicates are defined by the leaf slot and side effect counter + * @param items + * @returns + */ + removeDuplicatesFromStart(items: PublicDataUpdateRequest[]) { + const slotMap: Map = new Map(); + + for (const obj of items) { + const { leafSlot, sideEffectCounter } = obj; + + if (!slotMap.has(leafSlot.toBigInt())) { + slotMap.set(leafSlot.toBigInt(), obj); + } + if (sideEffectCounter > slotMap.get(leafSlot.toBigInt())!.sideEffectCounter) { + // Remove the first instance + slotMap.delete(leafSlot.toBigInt()); + slotMap.set(leafSlot.toBigInt(), obj); + } } + + return Array.from(slotMap.values()); } + // This is a temporary method, in our current kernel model, // nested calls will trigger an entire execution reversion // if any nested call reverts From bd2d7652d76a5ad6ca9d2c1470f715c99829b670 
Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 16:19:00 +0000 Subject: [PATCH 072/123] fmt --- .../src/structs/public_data_update_request.ts | 4 +-- .../end-to-end/src/e2e_p2p/p2p_network.ts | 2 +- yarn-project/end-to-end/src/e2e_p2p/shared.ts | 2 +- .../src/public/light_public_processor.test.ts | 6 ++-- .../src/public/light_public_processor.ts | 28 +++++++++---------- yarn-project/validator-client/src/factory.ts | 6 ++-- .../validator-client/src/validator.test.ts | 2 +- .../validator-client/src/validator.ts | 4 +-- 8 files changed, 27 insertions(+), 27 deletions(-) diff --git a/yarn-project/circuits.js/src/structs/public_data_update_request.ts b/yarn-project/circuits.js/src/structs/public_data_update_request.ts index 383ad813ca1..06f561dcce6 100644 --- a/yarn-project/circuits.js/src/structs/public_data_update_request.ts +++ b/yarn-project/circuits.js/src/structs/public_data_update_request.ts @@ -4,8 +4,8 @@ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/ import { inspect } from 'util'; import { GeneratorIndex } from '../constants.gen.js'; -import { AztecAddress } from '@aztec/foundation/aztec-address'; -import { ContractStorageUpdateRequest } from './contract_storage_update_request.js'; +import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { type ContractStorageUpdateRequest } from './contract_storage_update_request.js'; import { computePublicDataTreeLeafSlot } from '../hash/hash.js'; /** diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 194cd9bc80a..1ba67612a74 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,5 +1,5 @@ import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; +import { type AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index 8f37853ccb3..e2eefe23456 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -7,7 +7,7 @@ import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig import { type NodeContext } from '../fixtures/setup_p2p_test.js'; import { expect } from '@jest/globals'; -import { SpamContract } from '@aztec/noir-contracts.js'; +import { type SpamContract } from '@aztec/noir-contracts.js'; // submits a set of transactions to the provided Private eXecution Environment (PXE) export const submitComplexTxsTo = async (logger: DebugLogger, spamContract: SpamContract, numTxs: number) => { diff --git a/yarn-project/simulator/src/public/light_public_processor.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts index 50eb1cdb6f8..3a37fcf5e61 100644 --- a/yarn-project/simulator/src/public/light_public_processor.test.ts +++ b/yarn-project/simulator/src/public/light_public_processor.test.ts @@ -1,9 +1,9 @@ import { PublicDataWrite, - PublicExecutionRequest, + type PublicExecutionRequest, SimulationError, type TreeInfo, - TxValidator, + type TxValidator, mockTx, } from '@aztec/circuit-types'; 
import { @@ -34,7 +34,7 @@ import { LightPublicProcessor, type PublicExecutionResult, type PublicExecutor, - WorldStateDB, + type WorldStateDB, } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MerkleTreeOperations } from '@aztec/world-state'; diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index 8ff8190b9ef..a44d90db31b 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,13 +1,13 @@ // A minimal version of the public processor - that does not have the fluff -import { MerkleTreeId, PublicDataWrite, Tx, TxValidator, WorldStateSynchronizer } from "@aztec/circuit-types"; -import { AztecAddress, Gas, getNonEmptyItems, GlobalVariables, Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, ScopedNoteHash, ScopedNullifier } from "@aztec/circuits.js"; -import { MerkleTreeOperations } from "@aztec/world-state"; +import { MerkleTreeId, PublicDataWrite, Tx, type TxValidator, type WorldStateSynchronizer } from "@aztec/circuit-types"; +import { AztecAddress, Gas, getNonEmptyItems, type GlobalVariables, type Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, type Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, type ScopedNoteHash, type ScopedNullifier } from "@aztec/circuits.js"; +import { type MerkleTreeOperations } from "@aztec/world-state"; import { WorldStateDB } from "./public_db_sources.js"; import { PublicExecutor } from "./executor.js"; -import { TelemetryClient } from "@aztec/telemetry-client"; -import { PublicExecutionResult } from "./execution.js"; -import { ContractDataSource } from "@aztec/types/contracts"; +import { type TelemetryClient } from "@aztec/telemetry-client"; +import { type PublicExecutionResult } from "./execution.js"; +import { type ContractDataSource } from "@aztec/types/contracts"; import { padArrayEnd } from "@aztec/foundation/collection"; import { Fr } from "@aztec/foundation/fields"; import { siloNoteHash, siloNullifier } from "@aztec/circuits.js/hash"; @@ -144,7 +144,7 @@ export class LightPublicProcessor { // will fail // This will change whenever the vm is changed to be able to revert public async applyPrivateStateUpdates(tx: Tx) { - let insertionPromises: Promise[] = []; + const insertionPromises: Promise[] = []; if (tx.data.forRollup) { const {nullifiers, noteHashes} = tx.data.forRollup.end; if (nullifiers) { @@ -248,7 +248,7 @@ export class LightPublicProcessor { const nonRevertiblePublicExecutionResults: PublicExecutionResult[] = []; const publicExecutionResults: PublicExecutionResult[] = []; - let publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); + const publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); let startSideEffectCounter = publicKernelOutput.endSideEffectCounter + 1; // Execute the non revertible calls @@ -308,14 +308,14 @@ export class LightPublicProcessor { // This is just getting the private state updates after executing them // TODO(md): think about these aggregatePublicExecutionResults(results: PublicExecutionResult[]) { - let txCallNewNullifiers: ScopedNullifier[][] = []; - let txCallNewNoteHashes: 
ScopedNoteHash[][] = []; - let txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; + const txCallNewNullifiers: ScopedNullifier[][] = []; + const txCallNewNoteHashes: ScopedNoteHash[][] = []; + const txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; for (const res of results) { - let enqueuedCallNewNullifiers = []; - let enqueuedCallNewNoteHashes = []; - let enqueuedCallPublicDataWrites = []; + const enqueuedCallNewNullifiers = []; + const enqueuedCallNewNoteHashes = []; + const enqueuedCallPublicDataWrites = []; // Scope the nullifiers, note hashes and public data writes to the contract address enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 4fad4d4a0f0..78281c198f9 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -5,12 +5,12 @@ import { generatePrivateKey } from 'viem/accounts'; import { type ValidatorClientConfig } from './config.js'; import { ValidatorClient } from './validator.js'; import { LightPublicProcessorFactory } from '@aztec/simulator'; -import { ArchiveSource } from '@aztec/archiver'; +import { type ArchiveSource } from '@aztec/archiver'; import { createWorldStateSynchronizer } from '../../world-state/dest/synchronizer/factory.js'; -import { TelemetryClient } from '../../telemetry-client/src/telemetry.js'; +import { type TelemetryClient } from '../../telemetry-client/src/telemetry.js'; import { WorldStateConfig } from '../../world-state/dest/synchronizer/config.js'; import { DataStoreConfig } from '@aztec/kv-store/utils'; -import { WorldStateSynchronizer } from '@aztec/circuit-types'; +import { type WorldStateSynchronizer } from '@aztec/circuit-types'; // TODO: TODO: make the archiver optional??? 
export async function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, worldStateSynchronizer: WorldStateSynchronizer, archiver: ArchiveSource, telemetry: TelemetryClient) { diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 10cf1fc2dc5..aa93aa804b4 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -6,7 +6,7 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type P2P } from '@aztec/p2p'; -import { LightPublicProcessor, LightPublicProcessorFactory } from '@aztec/simulator'; +import { type LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; import { describe, expect, it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 0c4c3b6a8d1..434889ce7f4 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,4 +1,4 @@ -import { MerkleTreeId, Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; +import { MerkleTreeId, type Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; @@ -18,7 +18,7 @@ import { } from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; -import { LightPublicProcessor, LightPublicProcessorFactory } from '@aztec/simulator'; +import { LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; import { Timer } from '@aztec/foundation/timer'; export interface Validator { From 6f0aaeab9739196aa3783a27f5fa7130b0541fce Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 17:24:29 +0000 Subject: [PATCH 073/123] clean --- yarn-project/validator-client/src/factory.ts | 9 +--- .../validator-client/src/validator.ts | 49 ++++++++++--------- 2 files changed, 28 insertions(+), 30 deletions(-) diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 78281c198f9..62ef84449f3 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -6,26 +6,19 @@ import { type ValidatorClientConfig } from './config.js'; import { ValidatorClient } from './validator.js'; import { LightPublicProcessorFactory } from '@aztec/simulator'; import { type ArchiveSource } from '@aztec/archiver'; -import { createWorldStateSynchronizer } from '../../world-state/dest/synchronizer/factory.js'; import { type TelemetryClient } from '../../telemetry-client/src/telemetry.js'; -import { WorldStateConfig } from '../../world-state/dest/synchronizer/config.js'; -import { DataStoreConfig } from '@aztec/kv-store/utils'; import { type WorldStateSynchronizer } from '@aztec/circuit-types'; -// TODO: TODO: make the archiver optional??? 
export async function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, worldStateSynchronizer: WorldStateSynchronizer, archiver: ArchiveSource, telemetry: TelemetryClient) { if (config.disableValidator) { return undefined; } - // TODO: should this be exposed via a flag? if (config.validatorPrivateKey === undefined || config.validatorPrivateKey === '') { config.validatorPrivateKey = generatePrivateKey(); } + // We only craete a public processor factory if re-execution is enabled if (config.validatorReEx) { - // It need to be able to create a public processor from somewhere? - // What on earth does it need to do this -> check the sequencer code for this - const publicProcessorFactory = new LightPublicProcessorFactory( worldStateSynchronizer, archiver, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 434889ce7f4..126a8d27ab5 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -102,7 +102,7 @@ export class ValidatorClient implements Validator { if (error instanceof TransactionsNotAvailableError) { this.log.error(`Transactions not available, skipping attestation ${error.message}`); } else { - // TODO(md): this is a catch all error handler + // Catch all error handler this.log.error(`Failed to attest to proposal: ${error.message}`); } return undefined; @@ -115,57 +115,62 @@ export class ValidatorClient implements Validator { return this.validationService.attestToProposal(proposal); } - // We do not want to run the private state updates until we know if the public has failed or not + /** + * Re-execute the transactions in the proposal and check that the state updates match the header state + * @param proposal - The proposal to re-execute + */ async reExecuteTransactions(proposal: BlockProposal) { - // TODO(md): currently we are not running the rollups, so cannot calcualte the archive - // - use pallas new work to do this const {header, txHashes} = proposal.payload; - const txs = await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx))); - const filteredTransactions = txs.filter(tx => tx !== undefined); + const txs = (await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx)))).filter(tx => tx !== undefined); - // TODO: messy af - think about this more - if (filteredTransactions.length !== txHashes.length) { + // If we cannot request all of the transactions, then we should fail + if (txs.length !== txHashes.length) { this.log.error(`Failed to get transactions from the network: ${txHashes.join(', ')}`); throw new TransactionsNotAvailableError(txHashes); } + // Assertion: This check will fail if re-execution is not enabled if (!this.lightPublicProcessorFactory) { throw new PublicProcessorNotProvidedError(); } // TODO(md): make the transaction validator here // TODO(md): for now breaking the rules and makeing the world state sync public on the factory - const txValidator = new DoubleSpendTxValidator((this.lightPublicProcessorFactory as any).worldStateSynchronizer); - // We force the state to be synced here - const targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; + // We sync the state to the previous block as the public processor will not process the current block + const targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; this.log.verbose(`Re-ex: Syncing state to block number ${targetBlockNumber}`); + const lightProcessor = await 
this.lightPublicProcessorFactory.createWithSyncedState(targetBlockNumber, undefined, header.globalVariables, txValidator); this.log.verbose(`Re-ex: Re-executing transactions`); const timer = new Timer(); - await lightProcessor.process(filteredTransactions as Tx[]); + await lightProcessor.process(txs as Tx[]); this.log.verbose(`Re-ex: Re-execution complete ${timer.ms()}ms`); - // TODO(md): update this to check the archive matches + // This function will throw an error if state updates do not match + await this.checkReExecutedStateRoots(header, lightProcessor); + } + /** + * Check that the state updates match the header state + * + * TODO(md): + * - Check archive + * - Check l1 to l2 messages + * + * @param header - The header to check + * @param lightProcessor - The light processor to check + */ + private async checkReExecutedStateRoots(header: Header, lightProcessor: LightPublicProcessor) { const [ newNullifierTree, newNoteHashTree, newPublicDataTree, - // newL1ToL2MessageTree, ] = await lightProcessor.getTreeSnapshots(); - // Check the header has this information - // TODO: replace with archive check once we have it - - // TODO: l1 to l2 messages need to be inserted separately at the start - // if (header.state.l1ToL2MessageTree.root.toBuffer() !== (await newL1ToL2MessageTree).root) { - // this.log.error(`Re-ex: l1ToL2MessageTree does not match`); - // throw new ReExStateMismatchError(); - // } if (!header.state.partial.nullifierTree.root.toBuffer().equals(newNullifierTree.root)) { this.log.error(`Re-ex: nullifierTree does not match, ${header.state.partial.nullifierTree.root.toBuffer().toString('hex')} !== ${newNullifierTree.root.toString('hex')}`); throw new ReExStateMismatchError(); From 5c50f07e71d9bbc7d4c47e56a809cfd2de2f746c Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:33:57 +0000 Subject: [PATCH 074/123] fix: anvil failures issue --- .../src/e2e_p2p/gossip_network.test.ts | 13 ++++---- .../end-to-end/src/e2e_p2p/p2p_network.ts | 17 ++++++++-- .../src/e2e_p2p/rediscovery.test.ts | 20 ++++++----- .../end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 13 ++++---- .../end-to-end/src/fixtures/setup_p2p_test.ts | 33 ++++++++++++++----- 5 files changed, 62 insertions(+), 34 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 8c70f8d8582..2520cd225b4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -4,7 +4,7 @@ import { sleep } from '@aztec/aztec.js'; import fs from 'fs'; import { type NodeContext, createNodes } from '../fixtures/setup_p2p_test.js'; -import { P2PNetworkTest } from './p2p_network.js'; +import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from './p2p_network.js'; import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds @@ -16,14 +16,16 @@ const DATA_DIR = './data/gossip'; describe('e2e_p2p_network', () => { let t: P2PNetworkTest; + let nodes: AztecNodeService[]; - beforeAll(async () => { + beforeEach(async () => { t = await P2PNetworkTest.create('e2e_p2p_network', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); afterEach(async () => { + await t.stopNodes(nodes); await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: 
true }); @@ -41,7 +43,7 @@ describe('e2e_p2p_network', () => { // is if the txs are successfully gossiped around the nodes. const contexts: NodeContext[] = []; t.logger.info('Creating nodes'); - const nodes: AztecNodeService[] = await createNodes( + nodes = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, @@ -65,13 +67,10 @@ describe('e2e_p2p_network', () => { contexts.flatMap((context, i) => context.txs.map(async (tx, j) => { t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); + return tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); }), ), ); t.logger.info('All transactions mined'); - - // shutdown all nodes. - await t.stopNodes(nodes); }); }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 2be61a85cbd..13e51b6db2d 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,6 +1,6 @@ import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { EthCheatCodes } from '@aztec/aztec.js'; -import { ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; +import { AZTEC_SLOT_DURATION, ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; import { type BootstrapNode } from '@aztec/p2p'; @@ -10,7 +10,7 @@ import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; import { - createBootstrapNode, + createBootstrapNodeFromPrivateKey, createValidatorConfig, generateNodePrivateKeys, generatePeerIdPrivateKeys, @@ -18,6 +18,10 @@ import { import { type ISnapshotManager, type SubsystemsContext, createSnapshotManager } from '../fixtures/snapshot_manager.js'; import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; +// Use a fixed bootstrap node private key so that we can re-use the same snapshot and the nodes can find each other +const BOOTSTRAP_NODE_PRIVATE_KEY = '080212208f988fc0899e4a73a5aee4d271a5f20670603a756ad8d84f2c94263a6427c591'; +export const WAIT_FOR_TX_TIMEOUT = AZTEC_SLOT_DURATION * 3; + export class P2PNetworkTest { private snapshotManager: ISnapshotManager; private baseAccount; @@ -59,7 +63,8 @@ export class P2PNetworkTest { static async create(testName: string, numberOfNodes: number, basePort?: number) { const port = basePort || (await getPort()); - const bootstrapNode = await createBootstrapNode(port); + + const bootstrapNode = await createBootstrapNodeFromPrivateKey(BOOTSTRAP_NODE_PRIVATE_KEY, port); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); @@ -90,6 +95,12 @@ export class P2PNetworkTest { txHashes.push(txHash); this.logger.debug(`Adding ${account.address} as validator`); } + + // Remove the setup validator + const initialValidatorAddress = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`).address; + const txHash = await rollup.write.removeValidator([initialValidatorAddress]); + txHashes.push(txHash); + // Wait for all the transactions adding validators to be mined await Promise.all( txHashes.map(txHash => diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts index e2ffbb3739d..2f0ec7ddfdc 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts 
+++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -4,7 +4,7 @@ import { sleep } from '@aztec/aztec.js'; import fs from 'fs'; import { type NodeContext, createNode, createNodes } from '../fixtures/setup_p2p_test.js'; -import { P2PNetworkTest } from './p2p_network.js'; +import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from './p2p_network.js'; import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds @@ -16,14 +16,16 @@ const DATA_DIR = './data/rediscovery'; describe('e2e_p2p_rediscovery', () => { let t: P2PNetworkTest; + let nodes: AztecNodeService[]; - beforeAll(async () => { + beforeEach(async () => { t = await P2PNetworkTest.create('e2e_p2p_rediscovery', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); - afterAll(async () => { + afterEach(async () => { + await t.stopNodes(nodes); await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); @@ -32,7 +34,7 @@ describe('e2e_p2p_rediscovery', () => { it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; - const nodes: AztecNodeService[] = await createNodes( + nodes = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, @@ -56,7 +58,7 @@ describe('e2e_p2p_rediscovery', () => { await node.stop(); t.logger.info(`Node ${i} stopped`); await sleep(1200); - // TODO: make a restart nodes function + const newNode = await createNode( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys[i], @@ -68,6 +70,7 @@ describe('e2e_p2p_rediscovery', () => { t.logger.info(`Node ${i} restarted`); newNodes.push(newNode); } + nodes = newNodes; // wait a bit for peers to discover each other await sleep(2000); @@ -78,16 +81,15 @@ describe('e2e_p2p_rediscovery', () => { } // now ensure that all txs were successfully mined + await Promise.all( contexts.flatMap((context, i) => context.txs.map(async (tx, j) => { t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait(); + return tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); }), ), ); - // shutdown all nodes. 
- await t.stopNodes(newNodes); }); -}); +}); \ No newline at end of file diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts index 3e62a2fed71..42fa3fbd9c3 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -5,7 +5,7 @@ import { jest } from '@jest/globals'; import fs from 'fs'; import { type NodeContext, createNodes } from '../fixtures/setup_p2p_test.js'; -import { P2PNetworkTest } from './p2p_network.js'; +import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from './p2p_network.js'; import { createPXEServiceAndSubmitTransactions } from './shared.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds @@ -17,14 +17,16 @@ const DATA_DIR = './data/data-reqresp'; describe('e2e_p2p_reqresp_tx', () => { let t: P2PNetworkTest; + let nodes: AztecNodeService[]; - beforeAll(async () => { + beforeEach(async () => { t = await P2PNetworkTest.create('e2e_p2p_reqresp_tx', NUM_NODES, BOOT_NODE_UDP_PORT); await t.applyBaseSnapshots(); await t.setup(); }); - afterAll(async () => { + afterEach(async () => { + await t.stopNodes(nodes); await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); @@ -55,7 +57,7 @@ describe('e2e_p2p_reqresp_tx', () => { const contexts: NodeContext[] = []; t.logger.info('Creating nodes'); - const nodes: AztecNodeService[] = await createNodes( + nodes = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, @@ -93,13 +95,12 @@ describe('e2e_p2p_reqresp_tx', () => { contexts.flatMap((context, i) => context.txs.map(async (tx, j) => { t.logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - await tx.wait(); + await tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); t.logger.info(`Tx ${i}-${j}: ${await tx.getTxHash()} has been mined`); return await tx.getTxHash(); }), ), ); t.logger.info('All transactions mined'); - await t.stopNodes(nodes); }); }); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index b3a9050e341..b33981bdc1b 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -30,7 +30,7 @@ export function generateNodePrivateKeys(startIndex: number, numberOfNodes: numbe } export function generatePeerIdPrivateKey(): string { - // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ + // magic number is multiaddr prefix: https://multiformats.io/multiaddr/ for secp256k1 return '08021220' + generatePrivateKey().substr(2, 66); } @@ -47,13 +47,13 @@ export async function createNodes( peerIdPrivateKeys: string[], bootstrapNodeEnr: string, numNodes: number, - bootNodePort?: number, + bootNodePort: number, dataDirectory?: string, ): Promise { const nodePromises = []; for (let i = 0; i < numNodes; i++) { // We run on ports from the bootnode upwards if a port if provided, otherwise we get a random port - const port = bootNodePort ? bootNodePort + i + 1 : await getPort(); + const port = bootNodePort + i + 1; const dataDir = dataDirectory ? 
`${dataDirectory}-${i}` : undefined; const nodePromise = createNode(config, peerIdPrivateKeys[i], port, bootstrapNodeEnr, i, dataDir); @@ -123,17 +123,32 @@ export async function createValidatorConfig( return nodeConfig; } -export async function createBootstrapNode(port: number) { - const peerId = await createLibP2PPeerId(); - const bootstrapNode = new BootstrapNode(); - const config: BootnodeConfig = { +export async function createBootstrapNodeConfig(privateKey: string, port: number): Promise { + return { udpListenAddress: `0.0.0.0:${port}`, udpAnnounceAddress: `127.0.0.1:${port}`, - peerIdPrivateKey: Buffer.from(peerId.privateKey!).toString('hex'), + peerIdPrivateKey: privateKey, minPeerCount: 10, maxPeerCount: 100, }; - await bootstrapNode.start(config); +} + +export async function createBootstrapNodeFromPrivateKey(privateKey: string, port: number) { + const config = await createBootstrapNodeConfig(privateKey, port); + return await startBootstrapNode(config); +} + +export async function createBootstrapNode(port: number) { + const peerId = await createLibP2PPeerId(); + const config = await createBootstrapNodeConfig(Buffer.from(peerId.privateKey!).toString('hex'), port); + + return await startBootstrapNode(config); +} +async function startBootstrapNode(config: BootnodeConfig) { + const bootstrapNode = new BootstrapNode(); + await bootstrapNode.start(config); return bootstrapNode; } + + From 01e8ecaad8940403f9c080f2e662d7c4dd17f236 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:37:02 +0000 Subject: [PATCH 075/123] fmt --- .../end-to-end/src/e2e_p2p/rediscovery.test.ts | 3 +-- .../end-to-end/src/fixtures/setup_p2p_test.ts | 18 ++++++++---------- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts index 2f0ec7ddfdc..51c222e238a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -90,6 +90,5 @@ describe('e2e_p2p_rediscovery', () => { }), ), ); - }); -}); \ No newline at end of file +}); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index b33981bdc1b..659443a1445 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -42,7 +42,7 @@ export function generatePeerIdPrivateKeys(numberOfPeers: number): string[] { return peerIdPrivateKeys; } -export async function createNodes( +export function createNodes( config: AztecNodeConfig, peerIdPrivateKeys: string[], bootstrapNodeEnr: string, @@ -123,7 +123,7 @@ export async function createValidatorConfig( return nodeConfig; } -export async function createBootstrapNodeConfig(privateKey: string, port: number): Promise { +export function createBootstrapNodeConfig(privateKey: string, port: number): BootnodeConfig { return { udpListenAddress: `0.0.0.0:${port}`, udpAnnounceAddress: `127.0.0.1:${port}`, @@ -133,16 +133,16 @@ export async function createBootstrapNodeConfig(privateKey: string, port: number }; } -export async function createBootstrapNodeFromPrivateKey(privateKey: string, port: number) { - const config = await createBootstrapNodeConfig(privateKey, port); - return await startBootstrapNode(config); +export function createBootstrapNodeFromPrivateKey(privateKey: string, port: number): Promise { + const config = 
createBootstrapNodeConfig(privateKey, port); + return startBootstrapNode(config); } -export async function createBootstrapNode(port: number) { +export async function createBootstrapNode(port: number): Promise { const peerId = await createLibP2PPeerId(); - const config = await createBootstrapNodeConfig(Buffer.from(peerId.privateKey!).toString('hex'), port); + const config = createBootstrapNodeConfig(Buffer.from(peerId.privateKey!).toString('hex'), port); - return await startBootstrapNode(config); + return startBootstrapNode(config); } async function startBootstrapNode(config: BootnodeConfig) { @@ -150,5 +150,3 @@ async function startBootstrapNode(config: BootnodeConfig) { await bootstrapNode.start(config); return bootstrapNode; } - - From 863457cdc06c00c8a77398855e72392460dd88c6 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:42:45 +0000 Subject: [PATCH 076/123] bump number of nodes in reqresp test --- yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts index 42fa3fbd9c3..f173c6d738e 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -85,7 +85,7 @@ describe('e2e_p2p_reqresp_tx', () => { t.logger.info('Submitting transactions'); // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. // If the shuffling algorithm changes, then this will need to be updated. - for (let i = 0; i < 2; i++) { + for (let i = 0; i < NUM_NODES; i++) { const context = await createPXEServiceAndSubmitTransactions(t.logger, nodes[i], NUM_TXS_PER_NODE); contexts.push(context); } From e416dd41d688a830012dce87f7fc65dd2ed4ccf8 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 2 Oct 2024 20:10:50 +0000 Subject: [PATCH 077/123] fix --- yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts index 42fa3fbd9c3..8000c72bc09 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts @@ -85,7 +85,7 @@ describe('e2e_p2p_reqresp_tx', () => { t.logger.info('Submitting transactions'); // Only submit transactions to the first two nodes, so that we avoid our sequencer with a mocked p2p layer being picked to produce a block. // If the shuffling algorithm changes, then this will need to be updated. 
- for (let i = 0; i < 2; i++) { + for (let i = 1; i < 3; i++) { const context = await createPXEServiceAndSubmitTransactions(t.logger, nodes[i], NUM_TXS_PER_NODE); contexts.push(context); } From e6530284178b057b1be5de94fcc531a183e65b7b Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 00:34:31 +0000 Subject: [PATCH 078/123] chore: further testing --- .../foundation/src/collection/index.ts | 1 + .../src/collection/ordered_map.test.ts | 90 +++++ .../foundation/src/collection/ordered_map.ts | 50 +++ .../src/public/light_public_processor.test.ts | 359 +++++++++++++++++- .../src/public/light_public_processor.ts | 101 +++-- 5 files changed, 529 insertions(+), 72 deletions(-) create mode 100644 yarn-project/foundation/src/collection/ordered_map.test.ts create mode 100644 yarn-project/foundation/src/collection/ordered_map.ts diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index 00f8115dd60..b495a892ef0 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1,2 +1,3 @@ export * from './array.js'; export * from './object.js'; +export * from './ordered_map.js'; \ No newline at end of file diff --git a/yarn-project/foundation/src/collection/ordered_map.test.ts b/yarn-project/foundation/src/collection/ordered_map.test.ts new file mode 100644 index 00000000000..995ded29fc6 --- /dev/null +++ b/yarn-project/foundation/src/collection/ordered_map.test.ts @@ -0,0 +1,90 @@ +import { OrderedMap } from './ordered_map.js'; + +describe('OrderedMap', () => { + let orderedMap: OrderedMap; + + beforeEach(() => { + orderedMap = new OrderedMap(); + }); + + it('set should add new key-value pairs', () => { + orderedMap.set('a', 1); + expect(orderedMap.get('a')).toBe(1); + expect(orderedMap.has('a')).toBe(true); + }); + + it('set should update existing keys and move them to the end', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + orderedMap.set('a', 3); + expect(Array.from(orderedMap.values())).toEqual([2, 3]); + }); + + it('get should retrieve values for existing keys', () => { + orderedMap.set('a', 1); + expect(orderedMap.get('a')).toBe(1); + }); + + it('get should return undefined for non-existent keys', () => { + expect(orderedMap.get('a')).toBeUndefined(); + }); + + it('has should return true for existing keys', () => { + orderedMap.set('a', 1); + expect(orderedMap.has('a')).toBe(true); + }); + + it('has should return false for non-existent keys', () => { + expect(orderedMap.has('a')).toBe(false); + }); + + it('delete should remove existing keys and return true', () => { + orderedMap.set('a', 1); + expect(orderedMap.delete('a')).toBe(true); + expect(orderedMap.has('a')).toBe(false); + expect(Array.from(orderedMap.values())).toEqual([]); + }); + + test('delete should return false for non-existent keys', () => { + expect(orderedMap.delete('a')).toBe(false); + }); + + it('values should return an array of values in insertion order', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + orderedMap.set('c', 3); + expect(Array.from(orderedMap.values())).toEqual([1, 2, 3]); + }); + + it('values should reflect the updated order after re-insertion', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + orderedMap.set('a', 3); + expect(Array.from(orderedMap.values())).toEqual([2, 3]); + }); + + it('iterator should yield key-value pairs in insertion order', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + 
orderedMap.set('c', 3); + expect(Array.from(orderedMap)).toEqual([['a', 1], ['b', 2], ['c', 3]]); + }); + + it('iterator should reflect the updated order after re-insertion', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + orderedMap.set('a', 3); + expect(Array.from(orderedMap)).toEqual([['b', 2], ['a', 3]]); + }); + + it('multiple operations should maintain correct order and values', () => { + orderedMap.set('a', 1); + orderedMap.set('b', 2); + orderedMap.set('c', 3); + orderedMap.delete('b'); + orderedMap.set('d', 4); + orderedMap.set('a', 5); + expect(Array.from(orderedMap.values())).toEqual([3, 4, 5]); + expect(Array.from(orderedMap)).toEqual([['c', 3], ['d', 4], ['a', 5]]); + }); +}); \ No newline at end of file diff --git a/yarn-project/foundation/src/collection/ordered_map.ts b/yarn-project/foundation/src/collection/ordered_map.ts new file mode 100644 index 00000000000..194bc7f7a46 --- /dev/null +++ b/yarn-project/foundation/src/collection/ordered_map.ts @@ -0,0 +1,50 @@ +// This exists for public state squashing +// +// It has advantages over an traditional map as when we delete an item, +// we can insert to the back of the map +// This filtering is inefficient and a bottleneck over large lists +// Additionally getting values implements a copy +export class OrderedMap { + map = new Map(); + keys: K[] = []; + + constructor() {} + + set(key: K, value: V) { + if (this.map.has(key)) { + // Remove the key from the keys array + this.keys = this.keys.filter(k => k !== key); + } + // Add the key to the end of the keys array + this.keys.push(key); + // Set the value in the map + this.map.set(key, value); + } + + get(key: K) { + return this.map.get(key); + } + + has(key: K) { + return this.map.has(key); + } + + delete(key: K) { + if (this.map.delete(key)) { + this.keys = this.keys.filter(k => k !== key); + return true; + } + return false; + } + + values() { + return this.keys.map(key => this.map.get(key)!); + } + + *[Symbol.iterator]() { + for (let key of this.keys) { + yield [key, this.map.get(key)]; + } + } + + } \ No newline at end of file diff --git a/yarn-project/simulator/src/public/light_public_processor.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts index 3a37fcf5e61..5d31cdee21e 100644 --- a/yarn-project/simulator/src/public/light_public_processor.test.ts +++ b/yarn-project/simulator/src/public/light_public_processor.test.ts @@ -1,5 +1,4 @@ import { - PublicDataWrite, type PublicExecutionRequest, SimulationError, type TreeInfo, @@ -9,6 +8,7 @@ import { import { AppendOnlyTreeSnapshot, AztecAddress, + ContractStorageRead, ContractStorageUpdateRequest, Fr, Gas, @@ -16,24 +16,21 @@ import { GasSettings, GlobalVariables, Header, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_NOTE_HASHES_PER_TX, + MAX_NULLIFIERS_PER_TX, PUBLIC_DATA_TREE_HEIGHT, PartialStateReference, PublicAccumulatedDataBuilder, PublicDataTreeLeafPreimage, PublicDataUpdateRequest, - RevertCode, StateReference, } from '@aztec/circuits.js'; -import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { fr, makeSelector } from '@aztec/circuits.js/testing'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; import { LightPublicProcessor, type PublicExecutionResult, - type PublicExecutor, type WorldStateDB, } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -43,7 +40,8 @@ 
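// ---------------------------------------------------------------------------
// Illustrative usage sketch (not from the patch): how the OrderedMap added in
// ordered_map.ts behaves, based on what its tests assert. The class is assumed
// to be generic over key and value types; the angle brackets appear to have
// been stripped from this copy of the diff. The property that matters for
// public state squashing is that re-setting an existing key keeps a single
// entry but moves it to the back, so the most recent write per key wins while
// insertion order is preserved.
import { OrderedMap } from '@aztec/foundation/collection';

const recentWrites = new OrderedMap<string, number>();
recentWrites.set('slotA', 1);
recentWrites.set('slotB', 2);
recentWrites.set('slotA', 3); // re-insertion moves 'slotA' to the back
console.log(recentWrites.values()); // [2, 3], matching the re-insertion test
console.log([...recentWrites]); // [['slotB', 2], ['slotA', 3]]
// ---------------------------------------------------------------------------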
import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; -import { Spied, SpyInstance } from 'jest-mock'; +import { NewStateUpdates } from './light_public_processor.js'; +import { padArrayEnd } from '@aztec/foundation/collection'; describe('public_processor', () => { let db: MockProxy; @@ -65,7 +63,7 @@ describe('public_processor', () => { worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); }); - describe('with actual circuits', () => { + describe('Light Public Processor', () => { let publicDataTree: AppendOnlyTree; beforeAll(async () => { @@ -274,7 +272,9 @@ describe('public_processor', () => { } }); - await processor.process([tx]); + await expect(async () => { + await processor.process([tx]); + }).rejects.toThrow("Reverted in setup"); expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(1); @@ -366,15 +366,344 @@ describe('public_processor', () => { expect(worldStateDB.commit).toHaveBeenCalledTimes(1); expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + }); + + it('includes a transaction that reverts in app logic and teardown', async function () { + const tx = mockTx(1, { + hasLogs: true, + numberOfNonRevertiblePublicCallRequests: 1, + numberOfRevertiblePublicCallRequests: 1, + hasPublicTeardownCallRequest: true, + }); + + const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); + const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; + + const nestedContractAddress = AztecAddress.fromBigInt(112233n); + const contractSlotA = fr(0x100); + const contractSlotB = fr(0x150); + const contractSlotC = fr(0x200); + + let simulatorCallCount = 0; + const simulatorResults: PublicExecutionResult[] = [ + // Setup + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: nonRevertibleRequests[0], + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: nonRevertibleRequests[0].callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), + ], + }).build(), + ], + }).build(), + + // App Logic + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: revertibleRequests[0], + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 15), + ], + revertReason: new SimulationError('Simulation Failed', []), + }).build(), + + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], + }).build(teardownResultSettings), + PublicExecutionResultBuilder.fromFunctionCall({ + from: 
teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], + revertReason: new SimulationError('Simulation Failed', []), + }).build(teardownResultSettings), + ], + }).build(teardownResultSettings), + ]; + + jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + return Promise.resolve(simulatorResults[simulatorCallCount++]); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); + + const stateUpdateSpy = jest.spyOn(processor as any, "writeStateUpdates"); + + await processor.process([tx]); + + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(2); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + + const nestContractAddress = + simulatorResults[0].nestedExecutions[0].executionRequest.contractAddress; + const expectedPublicDataWrites = [ + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nonRevertibleRequests[0].callContext.storageContractAddress, + new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11), + ), + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nestContractAddress , + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), + ), + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nestContractAddress, + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), + ), + ] + // Tx hash + state updates + const expectedStateUpdatesOpject: NewStateUpdates = { + nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), + noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), + publicDataWrites: expectedPublicDataWrites, + } + expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); + }); + + it('runs a tx with all phases', async function () { + const tx = mockTx(1, { + numberOfNonRevertiblePublicCallRequests: 1, + numberOfRevertiblePublicCallRequests: 1, + hasPublicTeardownCallRequest: true, + }); + + const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); + const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + // TODO(md): gas + const gasLimits = Gas.from({ l2Gas: 1e9, daGas: 1e9 }); + const teardownGas = Gas.from({ l2Gas: 1e7, daGas: 1e7 }); + tx.data.constants.txContext.gasSettings = GasSettings.from({ + gasLimits: gasLimits, + teardownGasLimits: teardownGas, + inclusionFee: new Fr(1e4), + maxFeesPerGas: { feePerDaGas: new Fr(10), feePerL2Gas: new Fr(10) }, + }); + + // Private kernel tail to public pushes teardown gas allocation into revertible gas used + tx.data.forPublic!.end = PublicAccumulatedDataBuilder.fromPublicAccumulatedData(tx.data.forPublic!.end) + .withGasUsed(teardownGas) + .build(); + tx.data.forPublic!.endNonRevertibleData = PublicAccumulatedDataBuilder.fromPublicAccumulatedData( + tx.data.forPublic!.endNonRevertibleData, + ) + .withGasUsed(Gas.empty()) + .build(); + + const nestedContractAddress = revertibleRequests[0].callContext.storageContractAddress; + const contractSlotA = fr(0x100); + const contractSlotB = fr(0x150); + const contractSlotC = fr(0x200); + + let simulatorCallCount = 0; + + const 
initialGas = gasLimits.sub(teardownGas); + const setupGasUsed = Gas.from({ l2Gas: 1e6 }); + const appGasUsed = Gas.from({ l2Gas: 2e6, daGas: 2e6 }); + const teardownGasUsed = Gas.from({ l2Gas: 3e6, daGas: 3e6 }); + const afterSetupGas = initialGas.sub(setupGasUsed); + const afterAppGas = afterSetupGas.sub(appGasUsed); + const afterTeardownGas = teardownGas.sub(teardownGasUsed); + + // Total gas used is the sum of teardown gas allocation plus all expenditures along the way, + // without including the gas used in the teardown phase (since that's consumed entirely up front). + const expectedTotalGasUsed = { l2Gas: 1e7 + 1e6 + 2e6, daGas: 1e7 + 2e6 }; + + // Inclusion fee plus block gas fees times total gas used + const expectedTxFee = 1e4 + (1e7 + 1e6 + 2e6) * 1 + (1e7 + 2e6) * 1; + const transactionFee = new Fr(expectedTxFee); + + const simulatorResults: PublicExecutionResult[] = [ + // Setup + PublicExecutionResultBuilder.fromPublicExecutionRequest({ request: nonRevertibleRequests[0] }).build({ + startGasLeft: initialGas, + endGasLeft: afterSetupGas, + }), + + // App Logic + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: revertibleRequests[0], + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 10), + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 11), + ], + contractStorageReads: [new ContractStorageRead(contractSlotA, fr(0x100), 19)], + }).build({ + startGasLeft: afterSetupGas, + endGasLeft: afterAppGas, + }), + + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [ + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), + ], + contractStorageReads: [new ContractStorageRead(contractSlotA, fr(0x102), 15)], + }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), + PublicExecutionResultBuilder.fromFunctionCall({ + from: teardownRequest.callContext.storageContractAddress, + tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), + contractStorageUpdateRequests: [ + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13), + new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), + ], + contractStorageReads: [new ContractStorageRead(contractSlotA, fr(0x101), 12)], + }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), + ], + }).build({ + startGasLeft: teardownGas, + endGasLeft: afterTeardownGas, + transactionFee, + }), + ]; + + jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + const result = simulatorResults[simulatorCallCount++]; + return Promise.resolve(result); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); + + const stateUpdateSpy = jest.spyOn(processor as any, "writeStateUpdates"); + + await processor.process([tx]); + + // TODO: gas accounting + // const expectedSimulateCall = (availableGas: Partial>, txFee: number) => [ + // expect.anything(), // PublicExecution + // expect.anything(), // GlobalVariables + // Gas.from(availableGas), + // expect.anything(), // TxContext + // expect.anything(), // pendingNullifiers + // 
new Fr(txFee), + // expect.anything(), // SideEffectCounter + // ]; + + // expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); + // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(1, ...expectedSimulateCall(initialGas, 0)); + // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); + // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(3, ...expectedSimulateCall(teardownGas, expectedTxFee)); + + expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); + expect(worldStateDB.commit).toHaveBeenCalledTimes(1); + expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + + // expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); + // expect(processed[0].gasUsed[PublicKernelPhase.SETUP]).toEqual(setupGasUsed); + // expect(processed[0].gasUsed[PublicKernelPhase.APP_LOGIC]).toEqual(appGasUsed); + // expect(processed[0].gasUsed[PublicKernelPhase.TEARDOWN]).toEqual(teardownGasUsed); + + // const txEffect = toTxEffect(processed[0], GasFees.default()); + + const expectedPublicDataWrites = [ + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nestedContractAddress, + new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), + ), + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nestedContractAddress , + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), + ), + PublicDataUpdateRequest.fromContractStorageUpdateRequest( + nestedContractAddress , + new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), + ), + ] + // Tx hash + state updates + const expectedStateUpdatesOpject: NewStateUpdates = { + nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), + noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), + publicDataWrites: expectedPublicDataWrites, + } + expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); + + }); + + it('runs a tx with only teardown', async function () { + const tx = mockTx(1, { + numberOfNonRevertiblePublicCallRequests: 0, + numberOfRevertiblePublicCallRequests: 0, + hasPublicTeardownCallRequest: true, + }); + + const teardownRequest = tx.getPublicTeardownExecutionRequest()!; + + const gasLimits = Gas.from({ l2Gas: 1e9, daGas: 1e9 }); + const teardownGas = Gas.from({ l2Gas: 1e7, daGas: 1e7 }); + tx.data.constants.txContext.gasSettings = GasSettings.from({ + gasLimits: gasLimits, + teardownGasLimits: teardownGas, + inclusionFee: new Fr(1e4), + maxFeesPerGas: { feePerDaGas: new Fr(10), feePerL2Gas: new Fr(10) }, + }); - // we keep the non-revertible logs - // TODO: keep track of this - // expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3); - // expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1); + // Private kernel tail to public pushes teardown gas allocation into revertible gas used + tx.data.forPublic!.end = PublicAccumulatedDataBuilder.fromPublicAccumulatedData(tx.data.forPublic!.end) + .withGasUsed(teardownGas) + .build(); + tx.data.forPublic!.endNonRevertibleData = PublicAccumulatedDataBuilder.fromPublicAccumulatedData( + tx.data.forPublic!.endNonRevertibleData, + ) + .withGasUsed(Gas.empty()) + .build(); + + let simulatorCallCount = 0; + const txOverhead = 1e4; + const expectedTxFee = txOverhead + teardownGas.l2Gas * 1 + teardownGas.daGas * 1; + const transactionFee = new Fr(expectedTxFee); + const teardownGasUsed = Gas.from({ l2Gas: 1e6, daGas: 1e6 }); + + const simulatorResults: 
PublicExecutionResult[] = [ + // Teardown + PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: teardownRequest, + nestedExecutions: [], + }).build({ + startGasLeft: teardownGas, + endGasLeft: teardownGas.sub(teardownGasUsed), + transactionFee, + }), + ]; - // expect(processed[0].data.revertCode).toEqual(RevertCode.TEARDOWN_REVERTED); + jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { + if (simulatorCallCount < simulatorResults.length) { + const result = simulatorResults[simulatorCallCount++]; + return Promise.resolve(result); + } else { + throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); + } + }); - // expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]); + // TODO(md): add some public state updates / nullifier updates here or something + await processor.process([tx]); }); + }); }); diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index a44d90db31b..ef6eeef9a6f 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -13,6 +13,8 @@ import { Fr } from "@aztec/foundation/fields"; import { siloNoteHash, siloNullifier } from "@aztec/circuits.js/hash"; import { buffer } from "stream/consumers"; import { makeTuple } from "@aztec/foundation/array"; +import { OrderedMap } from "@aztec/foundation/collection"; + export class LightPublicProcessorFactory { constructor( @@ -30,8 +32,7 @@ export class LightPublicProcessorFactory { // Make sure the world state synchronizer is synced await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); - // We will sync again whenever the block is created - // This could be an inefficiency + // We will sync again whenever the block is created this could be an inefficiency const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); const historicalHeader = maybeHistoricalHeader ?? 
merkleTrees.getInitialHeader(); const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); @@ -47,7 +48,7 @@ export class InvalidTransactionsFound extends Error { } } -type NewStateUpdates = { +export type NewStateUpdates = { nullifiers: Fr[]; noteHashes: Fr[]; publicDataWrites: PublicDataUpdateRequest[]; @@ -57,8 +58,8 @@ type NewStateUpdates = { * A variant of the public processor that does not run the kernel circuits * * TODO(make issues): - * - Gas accounting is not complete - * - Calculating the state root (archive) is not complete + * - Gas accounting is not complete - https://github.com/AztecProtocol/aztec-packages/issues/8962 + * - Calculating the state root (archive) is not complete - https://github.com/AztecProtocol/aztec-packages/issues/8961 */ export class LightPublicProcessor { @@ -73,7 +74,6 @@ export class LightPublicProcessor { private merkleTrees: MerkleTreeOperations, private worldStateDB: WorldStateDB, private globalVariables: GlobalVariables, - // TODO(md): check if thies can be inferred private historicalHeader: Header, private txValidator: TxValidator, private telemetryClient: TelemetryClient @@ -83,7 +83,6 @@ export class LightPublicProcessor { // TODO: this will be the total gas limit available for a block this.blockGasLeft = Gas.empty(); this.pendingNullifiers = []; - } addNullifiers(nullifiers: Nullifier[]) { @@ -139,39 +138,20 @@ export class LightPublicProcessor { // throw new InvalidTransactionsFound(); } - // There is an assumption based on how the block builder works, that the transactions - // provided here CANNOT revert, else they are not added to the block as the kernels - // will fail - // This will change whenever the vm is changed to be able to revert - public async applyPrivateStateUpdates(tx: Tx) { - const insertionPromises: Promise[] = []; - if (tx.data.forRollup) { - const {nullifiers, noteHashes} = tx.data.forRollup.end; - if (nullifiers) { - insertionPromises.push(this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT)); - } - - if (noteHashes) { - insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); - } - } - - await Promise.all(insertionPromises); - } async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { const publicExecutionResults = await this.executePublicCalls(tx); - // TODO: handle this + + // The transaction has reverted in setup - the block should fail if (!publicExecutionResults) { - throw new Error("Public execution results are undefined"); + throw new Error("Reverted in setup"); } if (tx.data.forPublic) { - // tODO: put this in a function const stateUpdates = this.accumulateTransactionAndExecutedStateUpdates(tx, publicExecutionResults); await this.writeStateUpdates(stateUpdates); } else { - throw new Error("Transaction does not have public calls"); + throw new Error("Public transaction did have public data"); } } @@ -221,7 +201,6 @@ export class LightPublicProcessor { await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT); await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); - } // NOTES: @@ -305,6 +284,21 @@ export class LightPublicProcessor { return this.aggregateResults(nonRevertiblePublicExecutionResults, publicExecutionResults); } + // TODO: dont think splitting these up 
actually matters + aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { + const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); + + const revertibleNestedExecutions = revertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); + + return { + nullifiers: [...nonRevertible.nullifiers, ...revertible.nullifiers], + noteHashes: [...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], + publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites] + } + } + // This is just getting the private state updates after executing them // TODO(md): think about these aggregatePublicExecutionResults(results: PublicExecutionResult[]) { @@ -315,7 +309,7 @@ export class LightPublicProcessor { for (const res of results) { const enqueuedCallNewNullifiers = []; const enqueuedCallNewNoteHashes = []; - const enqueuedCallPublicDataWrites = []; + let enqueuedCallPublicDataWrites = []; // Scope the nullifiers, note hashes and public data writes to the contract address enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); @@ -328,17 +322,12 @@ export class LightPublicProcessor { } // Reverse - // TODO: WHY to we need to do this? yucky yucky, reversal doesnt feel quite right - let newNullifiers = txCallNewNullifiers.reverse().flat(); - let newNoteHashes = txCallNewNoteHashes.reverse().flat(); + let newNullifiers = txCallNewNullifiers.flat(); + let newNoteHashes = txCallNewNoteHashes.flat(); const newPublicDataWrites = txCallPublicDataWrites.flat(); // Squash data writes let uniquePublicDataWrites = this.removeDuplicatesFromStart(newPublicDataWrites); - uniquePublicDataWrites = this.sortBySideEffectCounter(uniquePublicDataWrites); - - newNullifiers = this.sortBySideEffectCounter(newNullifiers); - newNoteHashes = this.sortBySideEffectCounter(newNoteHashes); const returning = { nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), @@ -357,27 +346,25 @@ export class LightPublicProcessor { } - aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { - const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); - const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); - const revertibleNestedExecutions = revertibleResults.flatMap(res => this.collectNestedExecutions(res)); - const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); + // There is an assumption based on how the block builder works, that the transactions + // provided here CANNOT revert, else they are not added to the block as the kernels + // will fail + // This will change whenever the vm is changed to be able to revert + public async applyPrivateStateUpdates(tx: Tx) { + const insertionPromises: Promise[] = []; + if (tx.data.forRollup) { + const {nullifiers, noteHashes} = tx.data.forRollup.end; + if (nullifiers) { + insertionPromises.push(this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT)); + } - return { - nullifiers: [...nonRevertible.nullifiers, ...revertible.nullifiers], - noteHashes: 
[...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], - publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites] + if (noteHashes) { + insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); + } } - } - /** - * Sort by side effect counter, where the lowest is first - * @param items - * @returns - */ - sortBySideEffectCounter(items: T[]) { - return items.sort((a, b) => a.counter - b.counter); + await Promise.all(insertionPromises); } /** @@ -386,7 +373,7 @@ export class LightPublicProcessor { * @returns */ removeDuplicatesFromStart(items: PublicDataUpdateRequest[]) { - const slotMap: Map = new Map(); + const slotMap: OrderedMap = new OrderedMap(); for (const obj of items) { const { leafSlot, sideEffectCounter } = obj; From bebea79097c5a6353d90474e9d508820ecd977ac Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 01:05:04 +0000 Subject: [PATCH 079/123] fmt --- .../src/structs/public_data_update_request.ts | 8 +- .../end-to-end/src/e2e_p2p/p2p_network.ts | 60 +- .../end-to-end/src/e2e_p2p/reex.test.ts | 27 +- yarn-project/end-to-end/src/e2e_p2p/shared.ts | 15 +- .../end-to-end/src/fixtures/setup_p2p_test.ts | 4 - .../src/fixtures/snapshot_manager.ts | 7 +- .../foundation/src/collection/index.ts | 2 +- .../src/collection/ordered_map.test.ts | 19 +- .../foundation/src/collection/ordered_map.ts | 71 +- .../orchestrator/block-building-helpers.ts | 1 - yarn-project/simulator/src/public/executor.ts | 6 +- .../src/public/light_public_processor.test.ts | 108 +-- .../src/public/light_public_processor.ts | 809 ++++++++++-------- .../validator-client/src/validator.ts | 5 +- 14 files changed, 605 insertions(+), 537 deletions(-) diff --git a/yarn-project/circuits.js/src/structs/public_data_update_request.ts b/yarn-project/circuits.js/src/structs/public_data_update_request.ts index 06f561dcce6..fb4a17e21c5 100644 --- a/yarn-project/circuits.js/src/structs/public_data_update_request.ts +++ b/yarn-project/circuits.js/src/structs/public_data_update_request.ts @@ -1,12 +1,11 @@ -import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; +import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { inspect } from 'util'; -import { GeneratorIndex } from '../constants.gen.js'; -import { type AztecAddress } from '@aztec/foundation/aztec-address'; -import { type ContractStorageUpdateRequest } from './contract_storage_update_request.js'; + import { computePublicDataTreeLeafSlot } from '../hash/hash.js'; +import { type ContractStorageUpdateRequest } from './contract_storage_update_request.js'; /** * Write operations on the public data tree including the previous value. 
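// ---------------------------------------------------------------------------
// Rough sketch (not from the patch) of the public-data "squashing" that
// removeDuplicatesFromStart appears to implement with the new OrderedMap. The
// rest of that hunk is cut off in this excerpt, so the handling of
// sideEffectCounter is an assumption here, and squashPublicDataWrites is a
// hypothetical name: the sketch simply keeps the last write seen for each leaf
// slot while preserving insertion order.
import { PublicDataUpdateRequest } from '@aztec/circuits.js';
import { OrderedMap } from '@aztec/foundation/collection';

function squashPublicDataWrites(items: PublicDataUpdateRequest[]): PublicDataUpdateRequest[] {
  const bySlot = new OrderedMap<string, PublicDataUpdateRequest>();
  for (const write of items) {
    // A repeated leaf slot replaces the earlier write and moves it to the back.
    bySlot.set(write.leafSlot.toString(), write);
  }
  return bySlot.values();
}
// ---------------------------------------------------------------------------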
@@ -83,7 +82,6 @@ export class PublicDataUpdateRequest { const leafSlot = computePublicDataTreeLeafSlot(contractAddress, updateRequest.storageSlot); return new PublicDataUpdateRequest(leafSlot, updateRequest.newValue, updateRequest.counter); - } static empty() { diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index acb4224d471..9f6f9f3c7ac 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,9 +1,11 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { type AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; import { AZTEC_SLOT_DURATION, ETHEREUM_SLOT_DURATION, EthAddress } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; -import { BootnodeConfig, createLibP2PPeerId, type BootstrapNode } from '@aztec/p2p'; +import { SpamContract } from '@aztec/noir-contracts.js'; +import { type BootstrapNode } from '@aztec/p2p'; import getPort from 'get-port'; import { getContract } from 'viem'; @@ -15,10 +17,13 @@ import { generateNodePrivateKeys, generatePeerIdPrivateKeys, } from '../fixtures/setup_p2p_test.js'; -import { type ISnapshotManager, type SubsystemsContext, addAccounts, createSnapshotManager } from '../fixtures/snapshot_manager.js'; +import { + type ISnapshotManager, + type SubsystemsContext, + addAccounts, + createSnapshotManager, +} from '../fixtures/snapshot_manager.js'; import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; -import { SpamContract } from '@aztec/noir-contracts.js'; -import { getSchnorrAccount } from '@aztec/accounts/schnorr'; // Use a fixed bootstrap node private key so that we can re-use the same snapshot and the nodes can find each other const BOOTSTRAP_NODE_PRIVATE_KEY = '080212208f988fc0899e4a73a5aee4d271a5f20670603a756ad8d84f2c94263a6427c591'; @@ -134,33 +139,40 @@ export class P2PNetworkTest { account: this.baseAccount, }), }); - }); } - async setupAccount() { - await this.snapshotManager.snapshot("setup-account", addAccounts(1, this.logger, false), async ({accountKeys}, ctx) => { + async setupAccount() { + await this.snapshotManager.snapshot( + 'setup-account', + addAccounts(1, this.logger, false), + async ({ accountKeys }, ctx) => { const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); - await Promise.all(accountManagers.map(a => a.register())); - const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); - this.wallet = wallets[0]; - }); + await Promise.all(accountManagers.map(a => a.register())); + const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + this.wallet = wallets[0]; + }, + ); } async deploySpamContract() { - await this.snapshotManager.snapshot("add-spam-contract", async () => { - if (!this.wallet) { - throw new Error('Call snapshot t.setupAccount before deploying account contract'); - } - - const spamContract = await SpamContract.deploy(this.wallet).send().deployed(); - return {contractAddress: spamContract.address}; - }, async ({contractAddress}) => { - if (!this.wallet) { - throw new Error('Call snapshot t.setupAccount before deploying account contract'); - } - this.spamContract = await SpamContract.at(contractAddress, this.wallet); - }); + await this.snapshotManager.snapshot( + 'add-spam-contract', + async 
() => { + if (!this.wallet) { + throw new Error('Call snapshot t.setupAccount before deploying account contract'); + } + + const spamContract = await SpamContract.deploy(this.wallet).send().deployed(); + return { contractAddress: spamContract.address }; + }, + async ({ contractAddress }) => { + if (!this.wallet) { + throw new Error('Call snapshot t.setupAccount before deploying account contract'); + } + this.spamContract = await SpamContract.at(contractAddress, this.wallet); + }, + ); } async setup() { diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index ca036dea671..a3f79c4bcbe 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -1,17 +1,13 @@ - import { type AztecNodeService } from '@aztec/aztec-node'; import { sleep } from '@aztec/aztec.js'; +import { beforeAll, describe, it } from '@jest/globals'; import fs from 'fs'; import { createNodes } from '../fixtures/setup_p2p_test.js'; import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from './p2p_network.js'; import { submitComplexTxsTo } from './shared.js'; -import { afterEach, beforeAll, describe, it } from '@jest/globals'; -import { RollupAbi } from '@aztec/l1-artifacts'; -import { getContract } from 'viem'; -// Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; const NUM_TXS_PER_NODE = 1; const BOOT_NODE_UDP_PORT = 41000; @@ -22,12 +18,16 @@ describe('e2e_p2p_reex', () => { let t: P2PNetworkTest; beforeAll(async () => { t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT); + t.logger.verbose('Setup account'); await t.setupAccount(); + t.logger.verbose('Deploy spam contract'); await t.deploySpamContract(); + t.logger.verbose('Apply base snapshots'); await t.applyBaseSnapshots(); + t.logger.verbose('Setup nodes'); await t.setup(); }); @@ -45,24 +45,22 @@ describe('e2e_p2p_reex', () => { throw new Error('Bootstrap node ENR is not available'); } - // TODO(md): make this part of a snapshot - t.ctx.aztecNodeConfig.validatorReEx = true; const nodes: AztecNodeService[] = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, - NUM_NODES, - BOOT_NODE_UDP_PORT, - ); + NUM_NODES, + BOOT_NODE_UDP_PORT, + ); // wait a bit for peers to discover each other await sleep(4000); // tODO: use a tx with nested calls nodes.forEach(node => { - node.getSequencer()?.updateSequencerConfig( { + node.getSequencer()?.updateSequencerConfig({ minTxsPerBlock: NUM_TXS_PER_NODE, maxTxsPerBlock: NUM_TXS_PER_NODE, }); @@ -72,13 +70,12 @@ describe('e2e_p2p_reex', () => { // now ensure that all txs were successfully mined await Promise.all( txs.map(async (tx, i) => { - t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); - return tx.wait({timeout: WAIT_FOR_TX_TIMEOUT}); - }), + t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); + return tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); + }), ); // shutdown all nodes. 
await t.stopNodes(nodes); }); }); - diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index e2eefe23456..5685f4b26bd 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -3,24 +3,25 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type DebugLogger, type SentTx } from '@aztec/aztec.js'; import { CompleteAddress, TxStatus } from '@aztec/aztec.js'; import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; +import { type SpamContract } from '@aztec/noir-contracts.js'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; -import { type NodeContext } from '../fixtures/setup_p2p_test.js'; import { expect } from '@jest/globals'; -import { type SpamContract } from '@aztec/noir-contracts.js'; - // submits a set of transactions to the provided Private eXecution Environment (PXE) - export const submitComplexTxsTo = async (logger: DebugLogger, spamContract: SpamContract, numTxs: number) => { +import { type NodeContext } from '../fixtures/setup_p2p_test.js'; + +// submits a set of transactions to the provided Private eXecution Environment (PXE) +export const submitComplexTxsTo = async (logger: DebugLogger, spamContract: SpamContract, numTxs: number) => { const txs: SentTx[] = []; - const seed = 1234n - const spamCount = 15 + const seed = 1234n; + const spamCount = 15; for (let i = 0; i < numTxs; i++) { // TODO: check out batch call for deployments // Send a public mint tx - this will be minted from the token contract to the pxe account // const tx = token.methods.mint_public(accountManager.getCompleteAddress().address, 1n).send() - const tx = spamContract.methods.spam(seed + BigInt(i * spamCount), spamCount, false, true ).send() + const tx = spamContract.methods.spam(seed + BigInt(i * spamCount), spamCount, false, true).send(); const txHash = await tx.getTxHash(); logger.info(`Tx sent with hash ${txHash}`); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 59e2c5f016e..659443a1445 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -11,10 +11,8 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import getPort from 'get-port'; import { generatePrivateKey } from 'viem/accounts'; - import { getPrivateKeyFromIndex } from './utils.js'; - export interface NodeContext { node: AztecNodeService; pxeService: PXEService; @@ -152,5 +150,3 @@ async function startBootstrapNode(config: BootnodeConfig) { await bootstrapNode.start(config); return bootstrapNode; } - - diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index c5eef7d818c..ad0bea2cb2d 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -23,6 +23,7 @@ import { type DeployL1ContractsArgs, createL1Clients } from '@aztec/ethereum'; import { asyncMap } from '@aztec/foundation/async-map'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { resolver, reviver } from '@aztec/foundation/serialize'; +import { RollupAbi } from '@aztec/l1-artifacts'; import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; import { type PXEService, createPXEService, getPXEServiceConfig } from 
'@aztec/pxe'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -33,8 +34,8 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import getPort from 'get-port'; import { join } from 'path'; -import { getContract, type Hex } from 'viem'; -import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; +import { type Hex, getContract } from 'viem'; +import { mnemonicToAccount } from 'viem/accounts'; import { MNEMONIC } from './fixtures.js'; import { getACVMConfig } from './get_acvm_config.js'; @@ -47,7 +48,6 @@ import { getPrivateKeyFromIndex, startAnvil, } from './utils.js'; -import { RollupAbi } from '@aztec/l1-artifacts'; export type SubsystemsContext = { anvil: Anvil; @@ -407,7 +407,6 @@ async function setupFromFresh( await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash }); } - return { aztecNodeConfig, anvil, diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index b495a892ef0..d315cf2142f 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1,3 +1,3 @@ export * from './array.js'; export * from './object.js'; -export * from './ordered_map.js'; \ No newline at end of file +export * from './ordered_map.js'; diff --git a/yarn-project/foundation/src/collection/ordered_map.test.ts b/yarn-project/foundation/src/collection/ordered_map.test.ts index 995ded29fc6..611dd6534ba 100644 --- a/yarn-project/foundation/src/collection/ordered_map.test.ts +++ b/yarn-project/foundation/src/collection/ordered_map.test.ts @@ -67,14 +67,21 @@ describe('OrderedMap', () => { orderedMap.set('a', 1); orderedMap.set('b', 2); orderedMap.set('c', 3); - expect(Array.from(orderedMap)).toEqual([['a', 1], ['b', 2], ['c', 3]]); + expect(Array.from(orderedMap)).toEqual([ + ['a', 1], + ['b', 2], + ['c', 3], + ]); }); it('iterator should reflect the updated order after re-insertion', () => { orderedMap.set('a', 1); orderedMap.set('b', 2); orderedMap.set('a', 3); - expect(Array.from(orderedMap)).toEqual([['b', 2], ['a', 3]]); + expect(Array.from(orderedMap)).toEqual([ + ['b', 2], + ['a', 3], + ]); }); it('multiple operations should maintain correct order and values', () => { @@ -85,6 +92,10 @@ describe('OrderedMap', () => { orderedMap.set('d', 4); orderedMap.set('a', 5); expect(Array.from(orderedMap.values())).toEqual([3, 4, 5]); - expect(Array.from(orderedMap)).toEqual([['c', 3], ['d', 4], ['a', 5]]); + expect(Array.from(orderedMap)).toEqual([ + ['c', 3], + ['d', 4], + ['a', 5], + ]); }); -}); \ No newline at end of file +}); diff --git a/yarn-project/foundation/src/collection/ordered_map.ts b/yarn-project/foundation/src/collection/ordered_map.ts index 194bc7f7a46..8cb7fef1891 100644 --- a/yarn-project/foundation/src/collection/ordered_map.ts +++ b/yarn-project/foundation/src/collection/ordered_map.ts @@ -5,46 +5,45 @@ // This filtering is inefficient and a bottleneck over large lists // Additionally getting values implements a copy export class OrderedMap { - map = new Map(); - keys: K[] = []; + map = new Map(); + keys: K[] = []; - constructor() {} + constructor() {} - set(key: K, value: V) { - if (this.map.has(key)) { - // Remove the key from the keys array - this.keys = this.keys.filter(k => k !== key); - } - // Add the key to the end of the keys array - this.keys.push(key); - // Set the value in the map - this.map.set(key, value); + set(key: K, value: V) { + if 
(this.map.has(key)) { + // Remove the key from the keys array + this.keys = this.keys.filter(k => k !== key); } - - get(key: K) { - return this.map.get(key); - } - - has(key: K) { - return this.map.has(key); + // Add the key to the end of the keys array + this.keys.push(key); + // Set the value in the map + this.map.set(key, value); + } + + get(key: K) { + return this.map.get(key); + } + + has(key: K) { + return this.map.has(key); + } + + delete(key: K) { + if (this.map.delete(key)) { + this.keys = this.keys.filter(k => k !== key); + return true; } + return false; + } - delete(key: K) { - if (this.map.delete(key)) { - this.keys = this.keys.filter(k => k !== key); - return true; - } - return false; - } + values() { + return this.keys.map(key => this.map.get(key)!); + } - values() { - return this.keys.map(key => this.map.get(key)!); + *[Symbol.iterator]() { + for (const key of this.keys) { + yield [key, this.map.get(key)]; } - - *[Symbol.iterator]() { - for (let key of this.keys) { - yield [key, this.map.get(key)]; - } - } - - } \ No newline at end of file + } +} diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index e674fc25e2f..045f098b472 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -44,7 +44,6 @@ import { VK_TREE_HEIGHT, type VerificationKeyAsFields, type VerificationKeyData, - getNonEmptyItems, } from '@aztec/circuits.js'; import { assertPermutation, makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index a84f69c2e8a..47b2fd02891 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -21,7 +21,11 @@ import { PublicSideEffectTrace } from './side_effect_trace.js'; export class PublicExecutor { metrics: ExecutorMetrics; - constructor(private readonly worldStateDB: WorldStateDB, private readonly historicalHeader: Header, client: TelemetryClient) { + constructor( + private readonly worldStateDB: WorldStateDB, + private readonly historicalHeader: Header, + client: TelemetryClient, + ) { this.metrics = new ExecutorMetrics(client, 'PublicExecutor'); } diff --git a/yarn-project/simulator/src/public/light_public_processor.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts index 5d31cdee21e..b54d1c18d53 100644 --- a/yarn-project/simulator/src/public/light_public_processor.test.ts +++ b/yarn-project/simulator/src/public/light_public_processor.test.ts @@ -26,13 +26,11 @@ import { StateReference, } from '@aztec/circuits.js'; import { fr, makeSelector } from '@aztec/circuits.js/testing'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { type FieldsOf } from '@aztec/foundation/types'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; -import { - LightPublicProcessor, - type PublicExecutionResult, - type WorldStateDB, -} from '@aztec/simulator'; +import { LightPublicProcessor, type PublicExecutionResult, type WorldStateDB } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MerkleTreeOperations } from '@aztec/world-state'; @@ -40,8 +38,7 @@ import { jest } from 
'@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; -import { NewStateUpdates } from './light_public_processor.js'; -import { padArrayEnd } from '@aztec/foundation/collection'; +import { type NewStateUpdates } from './light_public_processor.js'; describe('public_processor', () => { let db: MockProxy; @@ -108,10 +105,12 @@ describe('public_processor', () => { new NoopTelemetryClient(), ); - publicExecutorSpy = jest.spyOn(processor.publicExecutor, 'simulate') + publicExecutorSpy = jest.spyOn(processor.publicExecutor, 'simulate'); publicExecutorSpy.mockImplementation((req: PublicExecutionRequest) => { - const result = PublicExecutionResultBuilder.fromPublicExecutionRequest({ request: req as PublicExecutionRequest }).build(); + const result = PublicExecutionResultBuilder.fromPublicExecutionRequest({ + request: req as PublicExecutionRequest, + }).build(); return Promise.resolve(result); }); }); @@ -128,7 +127,6 @@ describe('public_processor', () => { const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; @@ -263,7 +261,6 @@ describe('public_processor', () => { }).build(), ]; - jest.spyOn((processor as any).publicExecutor, 'simulate').mockImplementation(execution => { if (simulatorCallCount < simulatorResults.length) { return Promise.resolve(simulatorResults[simulatorCallCount++]); @@ -274,7 +271,7 @@ describe('public_processor', () => { await expect(async () => { await processor.process([tx]); - }).rejects.toThrow("Reverted in setup"); + }).rejects.toThrow('Reverted in setup'); expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(1); @@ -365,7 +362,6 @@ describe('public_processor', () => { expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); expect(worldStateDB.commit).toHaveBeenCalledTimes(1); expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - }); it('includes a transaction that reverts in app logic and teardown', async function () { @@ -443,7 +439,7 @@ describe('public_processor', () => { } }); - const stateUpdateSpy = jest.spyOn(processor as any, "writeStateUpdates"); + const stateUpdateSpy = jest.spyOn(processor as any, 'writeStateUpdates'); await processor.process([tx]); @@ -452,28 +448,27 @@ describe('public_processor', () => { expect(worldStateDB.commit).toHaveBeenCalledTimes(1); expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - const nestContractAddress = - simulatorResults[0].nestedExecutions[0].executionRequest.contractAddress; + const nestContractAddress = simulatorResults[0].nestedExecutions[0].executionRequest.contractAddress; const expectedPublicDataWrites = [ PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nonRevertibleRequests[0].callContext.storageContractAddress, - new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11), + nonRevertibleRequests[0].callContext.storageContractAddress, + new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11), ), PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestContractAddress , - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), + nestContractAddress, + new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), ), PublicDataUpdateRequest.fromContractStorageUpdateRequest( - 
nestContractAddress, - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), + nestContractAddress, + new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), ), - ] + ]; // Tx hash + state updates const expectedStateUpdatesOpject: NewStateUpdates = { nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), publicDataWrites: expectedPublicDataWrites, - } + }; expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); }); @@ -523,10 +518,6 @@ describe('public_processor', () => { const afterAppGas = afterSetupGas.sub(appGasUsed); const afterTeardownGas = teardownGas.sub(teardownGasUsed); - // Total gas used is the sum of teardown gas allocation plus all expenditures along the way, - // without including the gas used in the teardown phase (since that's consumed entirely up front). - const expectedTotalGasUsed = { l2Gas: 1e7 + 1e6 + 2e6, daGas: 1e7 + 2e6 }; - // Inclusion fee plus block gas fees times total gas used const expectedTxFee = 1e4 + (1e7 + 1e6 + 2e6) * 1 + (1e7 + 2e6) * 1; const transactionFee = new Fr(expectedTxFee); @@ -590,60 +581,55 @@ describe('public_processor', () => { } }); - const stateUpdateSpy = jest.spyOn(processor as any, "writeStateUpdates"); + const stateUpdateSpy = jest.spyOn(processor as any, 'writeStateUpdates'); await processor.process([tx]); - // TODO: gas accounting - // const expectedSimulateCall = (availableGas: Partial>, txFee: number) => [ - // expect.anything(), // PublicExecution - // expect.anything(), // GlobalVariables - // Gas.from(availableGas), - // expect.anything(), // TxContext - // expect.anything(), // pendingNullifiers - // new Fr(txFee), - // expect.anything(), // SideEffectCounter - // ]; - - // expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); - // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(1, ...expectedSimulateCall(initialGas, 0)); - // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); - // expect(publicExecutor.simulate).toHaveBeenNthCalledWith(3, ...expectedSimulateCall(teardownGas, expectedTxFee)); + const expectedSimulateCall = (availableGas: Partial>, _txFee: number) => [ + expect.anything(), // PublicExecution + expect.anything(), // GlobalVariables + Gas.from(availableGas), + expect.anything(), // TxContext + expect.anything(), // pendingNullifiers + new Fr(0), + // new Fr(txFee), + expect.anything(), // SideEffectCounter + ]; + + expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); + expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith(1, ...expectedSimulateCall(initialGas, 0)); + expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); + expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith( + 3, + ...expectedSimulateCall(teardownGas, expectedTxFee), + ); expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); expect(worldStateDB.commit).toHaveBeenCalledTimes(1); expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - // expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); - // expect(processed[0].gasUsed[PublicKernelPhase.SETUP]).toEqual(setupGasUsed); - // expect(processed[0].gasUsed[PublicKernelPhase.APP_LOGIC]).toEqual(appGasUsed); - // expect(processed[0].gasUsed[PublicKernelPhase.TEARDOWN]).toEqual(teardownGasUsed); - - // 
const txEffect = toTxEffect(processed[0], GasFees.default()); - const expectedPublicDataWrites = [ PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress, - new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), + nestedContractAddress, + new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), ), PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress , - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), + nestedContractAddress, + new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), ), PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress , - new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), + nestedContractAddress, + new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), ), - ] + ]; // Tx hash + state updates const expectedStateUpdatesOpject: NewStateUpdates = { nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), publicDataWrites: expectedPublicDataWrites, - } + }; expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); - }); it('runs a tx with only teardown', async function () { @@ -701,9 +687,7 @@ describe('public_processor', () => { } }); - // TODO(md): add some public state updates / nullifier updates here or something await processor.process([tx]); }); - }); }); diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index ef6eeef9a6f..e8c2d93aa7c 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,58 +1,76 @@ // A minimal version of the public processor - that does not have the fluff - -import { MerkleTreeId, PublicDataWrite, Tx, type TxValidator, type WorldStateSynchronizer } from "@aztec/circuit-types"; -import { AztecAddress, Gas, getNonEmptyItems, type GlobalVariables, type Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NoteHash, type Nullifier, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, PublicDataUpdateRequest, type ScopedNoteHash, type ScopedNullifier } from "@aztec/circuits.js"; -import { type MerkleTreeOperations } from "@aztec/world-state"; -import { WorldStateDB } from "./public_db_sources.js"; -import { PublicExecutor } from "./executor.js"; -import { type TelemetryClient } from "@aztec/telemetry-client"; -import { type PublicExecutionResult } from "./execution.js"; -import { type ContractDataSource } from "@aztec/types/contracts"; -import { padArrayEnd } from "@aztec/foundation/collection"; -import { Fr } from "@aztec/foundation/fields"; -import { siloNoteHash, siloNullifier } from "@aztec/circuits.js/hash"; -import { buffer } from "stream/consumers"; -import { makeTuple } from "@aztec/foundation/array"; -import { OrderedMap } from "@aztec/foundation/collection"; - +import { MerkleTreeId, Tx, type TxValidator, type WorldStateSynchronizer } from '@aztec/circuit-types'; +import { + Gas, + type GlobalVariables, + type Header, + MAX_NOTE_HASHES_PER_TX, + MAX_NULLIFIERS_PER_TX, + MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + NULLIFIER_SUBTREE_HEIGHT, + type Nullifier, + PUBLIC_DATA_SUBTREE_HEIGHT, + PublicDataTreeLeaf, + PublicDataUpdateRequest, + type ScopedNoteHash, + type ScopedNullifier, + getNonEmptyItems, +} from '@aztec/circuits.js'; +import { 
siloNoteHash, siloNullifier } from '@aztec/circuits.js/hash'; +import { makeTuple } from '@aztec/foundation/array'; +import { OrderedMap, padArrayEnd } from '@aztec/foundation/collection'; +import { Fr } from '@aztec/foundation/fields'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { type ContractDataSource } from '@aztec/types/contracts'; +import { type MerkleTreeOperations } from '@aztec/world-state'; + +import { type PublicExecutionResult } from './execution.js'; +import { PublicExecutor } from './executor.js'; +import { WorldStateDB } from './public_db_sources.js'; export class LightPublicProcessorFactory { - constructor( - private worldStateSynchronizer: WorldStateSynchronizer, - private contractDataSource: ContractDataSource, - private telemetryClient: TelemetryClient - ) {} - - public async createWithSyncedState( - targetBlockNumber: number, - maybeHistoricalHeader: Header | undefined, - globalVariables: GlobalVariables, - txValidator: TxValidator - ) { - // Make sure the world state synchronizer is synced - await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); - - // We will sync again whenever the block is created this could be an inefficiency - const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); - const historicalHeader = maybeHistoricalHeader ?? merkleTrees.getInitialHeader(); - const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); - - return new LightPublicProcessor(merkleTrees, worldStateDB, globalVariables, historicalHeader, txValidator, this.telemetryClient); - } + constructor( + private worldStateSynchronizer: WorldStateSynchronizer, + private contractDataSource: ContractDataSource, + private telemetryClient: TelemetryClient, + ) {} + + public async createWithSyncedState( + targetBlockNumber: number, + maybeHistoricalHeader: Header | undefined, + globalVariables: GlobalVariables, + txValidator: TxValidator, + ) { + // Make sure the world state synchronizer is synced + await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); + + // We will sync again whenever the block is created this could be an inefficiency + const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); + const historicalHeader = maybeHistoricalHeader ?? 
merkleTrees.getInitialHeader(); + const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); + + return new LightPublicProcessor( + merkleTrees, + worldStateDB, + globalVariables, + historicalHeader, + txValidator, + this.telemetryClient, + ); + } } - export class InvalidTransactionsFound extends Error { - constructor() { - super("Double spend tx found"); - } + constructor() { + super('Double spend tx found'); + } } export type NewStateUpdates = { - nullifiers: Fr[]; - noteHashes: Fr[]; - publicDataWrites: PublicDataUpdateRequest[]; -} + nullifiers: Fr[]; + noteHashes: Fr[]; + publicDataWrites: PublicDataUpdateRequest[]; +}; /** * A variant of the public processor that does not run the kernel circuits @@ -62,350 +80,401 @@ export type NewStateUpdates = { * - Calculating the state root (archive) is not complete - https://github.com/AztecProtocol/aztec-packages/issues/8961 */ export class LightPublicProcessor { - - public publicExecutor: PublicExecutor; - - // State - // TODO(md): not used - private blockGasLeft: Gas; - private pendingNullifiers: Nullifier[]; - - constructor( - private merkleTrees: MerkleTreeOperations, - private worldStateDB: WorldStateDB, - private globalVariables: GlobalVariables, - private historicalHeader: Header, - private txValidator: TxValidator, - private telemetryClient: TelemetryClient - ) { - this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); - - // TODO: this will be the total gas limit available for a block - this.blockGasLeft = Gas.empty(); - this.pendingNullifiers = []; + public publicExecutor: PublicExecutor; + + // State + private pendingNullifiers: Nullifier[]; + + constructor( + private merkleTrees: MerkleTreeOperations, + private worldStateDB: WorldStateDB, + private globalVariables: GlobalVariables, + historicalHeader: Header, + txValidator: TxValidator, + telemetryClient: TelemetryClient, + ) { + this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); + this.pendingNullifiers = []; + } + + addNullifiers(nullifiers: Nullifier[]) { + this.pendingNullifiers.push(...nullifiers); + } + + public getTreeSnapshots() { + return Promise.all([ + this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), + this.merkleTrees.getTreeInfo(MerkleTreeId.ARCHIVE), + ]); + } + + /** + * Process a list of transactions + * + * If any of the transactions are invalid, then we throw an error + * @param txs - The transactions to process + */ + public async process(txs: Tx[]) { + // TODO(md): do we need dummy transactions? + txs = txs.map(tx => Tx.clone(tx)); + + this.validateTransactions(txs); + + for (const tx of txs) { + if (tx.hasPublicCalls()) { + await this.executeEnqueuedCallsAndApplyStateUpdates(tx); + } else { + await this.applyPrivateStateUpdates(tx); + + // TODO(md): do i need to do this? 
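+        // Presumably this mirrors the shape the block builder gives the public data tree:
+        // each tx appends one fixed-size subtree of MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX
+        // leaves, so a tx with no public calls still inserts an all-empty subtree to keep
+        // the tree layout consistent with txs that do carry public writes.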
+ // Apply empty public data writes + const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => + PublicDataTreeLeaf.empty(), + ); + await this.merkleTrees.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + emptyPublicDataWrites.map(x => x.toBuffer()), + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + } } - - addNullifiers(nullifiers: Nullifier[]) { - this.pendingNullifiers.push(...nullifiers); + } + + validateTransactions(_txs: Tx[]) { + // TODO: Run tx validator checks + // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + // TODO(md): check the last valid block number is close + // If any of the transactions are invalid, we throw an error + // if (invalidTxs.length > 0) { + // console.log("invalid txs found"); + // throw new InvalidTransactionsFound(); + } + + async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { + const publicExecutionResults = await this.executePublicCalls(tx); + + // The transaction has reverted in setup - the block should fail + if (!publicExecutionResults) { + throw new Error('Reverted in setup'); } - // NOTE: does not have the archive - public async getTreeSnapshots() { - return Promise.all([ - this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), - ]); + if (tx.data.forPublic) { + const stateUpdates = this.accumulateTransactionAndExecutedStateUpdates(tx, publicExecutionResults); + await this.writeStateUpdates(stateUpdates); + } else { + throw new Error('Public transaction did have public data'); } - - /** - * Process a list of transactions - * - * If any of the transactions are invalid, then we throw an error - * @param txs - The transactions to process - */ - public async process(txs: Tx[]) { - // TODO(md): do we need dummy transactions? - txs = txs.map(tx => Tx.clone(tx)); - - this.validateTransactions(txs); - - for (const tx of txs) { - if (tx.hasPublicCalls()) { - await this.executeEnqueuedCallsAndApplyStateUpdates(tx); - } else { - await this.applyPrivateStateUpdates(tx); - - // TODO(md): do i need to do this? - // Apply empty public data writes - const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => PublicDataTreeLeaf.empty()); - await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, emptyPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); - } - } - + } + + /** + * Take the state updates from each of the transactions and merge them together + * + * 1. Non Revertible calls come first + * 2. Private updates come second + * 3. 
Public updates come third + */ + accumulateTransactionAndExecutedStateUpdates(tx: Tx, publicExecutionResults: NewStateUpdates) { + const { nullifiers, noteHashes, publicDataWrites } = publicExecutionResults; + const { + nullifiers: nonRevertibleNullifiers, + noteHashes: nonRevertibleNoteHashes, + publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests, + } = tx.data.forPublic!.endNonRevertibleData; + const { + nullifiers: txNullifiers, + noteHashes: txNoteHashes, + publicDataUpdateRequests: txPublicDataUpdateRequests, + } = tx.data.forPublic!.end; + + const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); + const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); + const publicNullifiers = getNonEmptyItems(nullifiers); + + const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); + const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); + const publicNoteHashes = getNonEmptyItems(noteHashes); + + const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); + const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter( + p => !p.isEmpty(), + ); + const nonEmptyPublicDataWrites = publicDataWrites.filter(p => !p.isEmpty()); + + const allNullifiers = padArrayEnd( + [...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], + Fr.ZERO, + MAX_NULLIFIERS_PER_TX, + ); + const allNoteHashes = padArrayEnd( + [...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], + Fr.ZERO, + MAX_NOTE_HASHES_PER_TX, + ); + const allPublicDataUpdateRequests = [ + ...nonEmptyNonRevertiblePublicDataUpdateRequests, + ...nonEmptyTxPublicDataUpdateRequests, + ...nonEmptyPublicDataWrites, + ]; + + return { + nullifiers: allNullifiers, + noteHashes: allNoteHashes, + publicDataWrites: allPublicDataUpdateRequests, + }; + } + + async writeStateUpdates(stateUpdates: NewStateUpdates) { + const { nullifiers, noteHashes, publicDataWrites } = stateUpdates; + + // Convert public state toto the tree leaves + const allPublicDataWrites = publicDataWrites.map( + ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), + ); + + await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); + await this.merkleTrees.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + nullifiers.map(n => n.toBuffer()), + NULLIFIER_SUBTREE_HEIGHT, + ); + await this.merkleTrees.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + allPublicDataWrites.map(x => x.toBuffer()), + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + } + + public async executePublicCalls(tx: Tx) { + // Transactions are split up into a number of parts + // 1. Non revertible calls - these run with the public kernel setup + // - This includes fee payment transactions + // 2. Public Calls - These are the the noir code that are enqueued in the tx by the users + // - This runs with the public kernel app logic + // 3. 
Teardown Call - This is the public teardown call that does fee refunds + // - This runs with the public kernel teardown + await this.worldStateDB.addNewContracts(tx); + + // Call stacks + const nonRevertibleCalls = tx.getNonRevertiblePublicExecutionRequests(); + const publicCalls = tx.getRevertiblePublicExecutionRequests(); + const teardownCall = tx.getPublicTeardownExecutionRequest()!; + + // Gas + const teardownGasLimit = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + let gasRemaining = tx.data.constants.txContext.gasSettings.getLimits().sub(teardownGasLimit); + + // Store the successful results for db insertions + const nonRevertiblePublicExecutionResults: PublicExecutionResult[] = []; + const publicExecutionResults: PublicExecutionResult[] = []; + + // Get side effect counters + const publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); + let startSideEffectCounter = publicKernelOutput.endSideEffectCounter + 1; + + // Execute the non revertible calls + for (const call of nonRevertibleCalls) { + const res = await this.publicExecutor.simulate( + call, + this.globalVariables, + gasRemaining, + tx.data.constants.txContext, + this.pendingNullifiers, + Fr.ZERO, + startSideEffectCounter, + ); + startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; + gasRemaining = Gas.from(res.endGasLeft); + + if (!this.temporaryDidCallOrNestedCallRevert(res)) { + this.addNullifiers(res.nullifiers); + nonRevertiblePublicExecutionResults.push(res); + await this.worldStateDB.checkpoint(); + } else { + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCommit(); + + return; + } } - validateTransactions(txs: Tx[]) { - // TODO: Run tx validator checks - // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); - - // TODO(md): check the last valid block number is close - - // If any of the transactions are invalid, we throw an error - // if (invalidTxs.length > 0) { - // console.log("invalid txs found"); - // throw new InvalidTransactionsFound(); + for (const call of publicCalls) { + // Execute the non revertible calls + const res = await this.publicExecutor.simulate( + call, + this.globalVariables, + gasRemaining, + tx.data.constants.txContext, + this.pendingNullifiers, + Fr.ZERO, + startSideEffectCounter, + ); + startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; + gasRemaining = Gas.from(res.endGasLeft); + + if (!this.temporaryDidCallOrNestedCallRevert(res)) { + this.addNullifiers(res.nullifiers); + publicExecutionResults.push(res); + } else { + // Similarly, if a teardown call fails, it will revert + // back to the setup state + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCheckpoint(); + } } - - async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { - const publicExecutionResults = await this.executePublicCalls(tx); - - // The transaction has reverted in setup - the block should fail - if (!publicExecutionResults) { - throw new Error("Reverted in setup"); - } - - if (tx.data.forPublic) { - const stateUpdates = this.accumulateTransactionAndExecutedStateUpdates(tx, publicExecutionResults); - await this.writeStateUpdates(stateUpdates); - } else { - throw new Error("Public transaction did have public data"); - } + if (teardownCall) { + const res = await this.publicExecutor.simulate( + teardownCall, + this.globalVariables, + teardownGasLimit, + tx.data.constants.txContext, + this.pendingNullifiers, + Fr.ZERO, + startSideEffectCounter, + ); + + if 
(!this.temporaryDidCallOrNestedCallRevert(res)) { + this.addNullifiers(res.nullifiers); + publicExecutionResults.push(res); + } else { + // Similarly, if a public calls fail, it will revert + // back to the setup state + await this.worldStateDB.removeNewContracts(tx); + await this.worldStateDB.rollbackToCheckpoint(); + } } - /** - * Take the state updates from each of the transactions and merge them together - * - * 1. Non Revertible calls come first - * 2. Private updates come second - * 3. Public updates come third - */ - accumulateTransactionAndExecutedStateUpdates(tx: Tx, publicExecutionResults: NewStateUpdates) { - const {nullifiers, noteHashes, publicDataWrites} = publicExecutionResults; - const {nullifiers: nonRevertibleNullifiers, noteHashes: nonRevertibleNoteHashes, publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests} = tx.data.forPublic!.endNonRevertibleData; - const {nullifiers: txNullifiers, noteHashes: txNoteHashes, publicDataUpdateRequests: txPublicDataUpdateRequests} = tx.data.forPublic!.end; - - const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); - const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); - const publicNullifiers = getNonEmptyItems(nullifiers); - - const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); - const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); - const publicNoteHashes = getNonEmptyItems(noteHashes); - - const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); - const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter(p => !p.isEmpty()); - const nonEmptyPublicDataWrites = publicDataWrites.filter(p => !p.isEmpty()); - - const allNullifiers = padArrayEnd([...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], Fr.ZERO, MAX_NULLIFIERS_PER_TX); - const allNoteHashes = padArrayEnd([...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], Fr.ZERO, MAX_NOTE_HASHES_PER_TX); - const allPublicDataUpdateRequests = [...nonEmptyNonRevertiblePublicDataUpdateRequests, ...nonEmptyTxPublicDataUpdateRequests, ...nonEmptyPublicDataWrites]; - - return { - nullifiers: allNullifiers, - noteHashes: allNoteHashes, - publicDataWrites: allPublicDataUpdateRequests - } + await this.worldStateDB.commit(); + + // Sweep up performed state updates + return this.aggregateResults(nonRevertiblePublicExecutionResults, publicExecutionResults); + } + + aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { + const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); + + const revertibleNestedExecutions = revertibleResults.flatMap(res => this.collectNestedExecutions(res)); + const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); + + return { + nullifiers: [...nonRevertible.nullifiers, ...revertible.nullifiers], + noteHashes: [...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], + publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites], + }; + } + + aggregatePublicExecutionResults(results: PublicExecutionResult[]) { + const txCallNewNullifiers: ScopedNullifier[][] = []; + const txCallNewNoteHashes: ScopedNoteHash[][] = []; + const 
txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; + + for (const res of results) { + // Scope the nullifiers, note hashes and public data writes to the contract address + txCallNewNullifiers.push( + getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress)), + ); + txCallNewNoteHashes.push( + getNonEmptyItems(res.noteHashes).map(n => n.scope(res.executionRequest.contractAddress)), + ); + txCallPublicDataWrites.push( + getNonEmptyItems(res.contractStorageUpdateRequests).map(req => + PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req), + ), + ); } - async writeStateUpdates(stateUpdates: NewStateUpdates) { - const {nullifiers, noteHashes, publicDataWrites} = stateUpdates; - - // Convert public state toto the tree leaves - const allPublicDataWrites = publicDataWrites.map( - ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), - ); - - await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); - await this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT); - await this.merkleTrees.batchInsert(MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map(x => x.toBuffer()), PUBLIC_DATA_SUBTREE_HEIGHT); + // Reverse + const newNullifiers = txCallNewNullifiers.flat(); + const newNoteHashes = txCallNewNoteHashes.flat(); + const newPublicDataWrites = txCallPublicDataWrites.flat(); + + // Squash data writes + const uniquePublicDataWrites = this.removeDuplicatesFromStart(newPublicDataWrites); + + const returning = { + nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), + newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), + publicDataWrites: uniquePublicDataWrites, + }; + return returning; + } + + collectNestedExecutions(result: PublicExecutionResult) { + const nestedExecutions: PublicExecutionResult[] = []; + for (const res of result.nestedExecutions) { + nestedExecutions.push(...this.collectNestedExecutions(res)); } + return [result, ...nestedExecutions]; + } + + // There is an assumption based on how the block builder works, that the transactions + // provided here CANNOT revert, else they are not added to the block as the kernels + // will fail + // This will change whenever the vm is changed to be able to revert + public async applyPrivateStateUpdates(tx: Tx) { + const insertionPromises: Promise[] = []; + if (tx.data.forRollup) { + const { nullifiers, noteHashes } = tx.data.forRollup.end; + if (nullifiers) { + insertionPromises.push( + this.merkleTrees.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + nullifiers.map(n => n.toBuffer()), + NULLIFIER_SUBTREE_HEIGHT, + ), + ); + } - // NOTES: - // - gas accounting needs to be done here and - // - maybe make another transaction context that stores the gas for a trnsactions - public async executePublicCalls(tx: Tx) { - // Transactions are split up into a number of parts - // 1. Non revertible calls - these run with the public kernel setup - // - This includes fee payment transactions - // 2. Public Calls - These are the the noir code that are enqueued in the tx by the users - // - This runs with the public kernel app logic - // 3. 
Teardown Call - This is the public teardown call that does fee refunds - // - This runs with the public kernel teardown - await this.worldStateDB.addNewContracts(tx); - - const nonRevertibleCalls = tx.getNonRevertiblePublicExecutionRequests(); - const publicCalls = tx.getRevertiblePublicExecutionRequests(); - const teardownCall = tx.getPublicTeardownExecutionRequest()!; - - // TODO(md): gas - const transactionGas = tx.data.constants.txContext.gasSettings.getLimits(); - - // Store the successful results for db insertions - const nonRevertiblePublicExecutionResults: PublicExecutionResult[] = []; - const publicExecutionResults: PublicExecutionResult[] = []; - - const publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); - let startSideEffectCounter = publicKernelOutput.endSideEffectCounter + 1; - - // Execute the non revertible calls - for (const call of nonRevertibleCalls) { - const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); - startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - nonRevertiblePublicExecutionResults.push(res); - this.worldStateDB.checkpoint(); - } else { - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCommit(); - - // TODO: When this part fails, we want skip to some cleanup part - // This probably explains the interesting control flow - return; - } - } - - for (const call of publicCalls) { - // Execute the non revertible calls - const res = await this.publicExecutor.simulate(call, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); - startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - publicExecutionResults.push(res); - } else { - // Similarly, if a teardown call fails, it will revert - // back to the setup state - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCheckpoint(); - } - } - - if (teardownCall) { - const res = await this.publicExecutor.simulate(teardownCall, this.globalVariables, transactionGas, tx.data.constants.txContext, this.pendingNullifiers, Fr.ZERO, startSideEffectCounter); - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - publicExecutionResults.push(res); - } else { - // Similarly, if a public calls fail, it will revert - // back to the setup state - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCheckpoint(); - } - } - - await this.worldStateDB.commit(); - - return this.aggregateResults(nonRevertiblePublicExecutionResults, publicExecutionResults); - } - - // TODO: dont think splitting these up actually matters - aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { - const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); - const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); - - const revertibleNestedExecutions = revertibleResults.flatMap(res => this.collectNestedExecutions(res)); - const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); - - return { - nullifiers: 
[...nonRevertible.nullifiers, ...revertible.nullifiers], - noteHashes: [...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], - publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites] - } - } - - // This is just getting the private state updates after executing them - // TODO(md): think about these - aggregatePublicExecutionResults(results: PublicExecutionResult[]) { - const txCallNewNullifiers: ScopedNullifier[][] = []; - const txCallNewNoteHashes: ScopedNoteHash[][] = []; - const txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; - - for (const res of results) { - const enqueuedCallNewNullifiers = []; - const enqueuedCallNewNoteHashes = []; - let enqueuedCallPublicDataWrites = []; - - // Scope the nullifiers, note hashes and public data writes to the contract address - enqueuedCallNewNullifiers.push(...getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress))); - enqueuedCallNewNoteHashes.push(...getNonEmptyItems(res.noteHashes).map(n => n.scope(res.executionRequest.contractAddress))); - enqueuedCallPublicDataWrites.push(...getNonEmptyItems(res.contractStorageUpdateRequests).map(req => PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req))); - - txCallNewNullifiers.push(enqueuedCallNewNullifiers); - txCallNewNoteHashes.push(enqueuedCallNewNoteHashes); - txCallPublicDataWrites.push(enqueuedCallPublicDataWrites); - } - - // Reverse - let newNullifiers = txCallNewNullifiers.flat(); - let newNoteHashes = txCallNewNoteHashes.flat(); - const newPublicDataWrites = txCallPublicDataWrites.flat(); - - // Squash data writes - let uniquePublicDataWrites = this.removeDuplicatesFromStart(newPublicDataWrites); - - const returning = { - nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), - newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), - publicDataWrites: uniquePublicDataWrites - }; - return returning; + if (noteHashes) { + insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); + } } - collectNestedExecutions(result: PublicExecutionResult) { - const nestedExecutions: PublicExecutionResult[] = []; - for (const res of result.nestedExecutions) { - nestedExecutions.push(...this.collectNestedExecutions(res)); - } - return [result, ...nestedExecutions]; + await Promise.all(insertionPromises); + } + + /** + * Remove duplicates keeping the oldest item, where duplicates are defined by the leaf slot and side effect counter + * @param items + * @returns + */ + removeDuplicatesFromStart(items: PublicDataUpdateRequest[]) { + const slotMap: OrderedMap = new OrderedMap(); + + for (const obj of items) { + const { leafSlot, sideEffectCounter } = obj; + + if (!slotMap.has(leafSlot.toBigInt())) { + slotMap.set(leafSlot.toBigInt(), obj); + } + if (sideEffectCounter > slotMap.get(leafSlot.toBigInt())!.sideEffectCounter) { + // Remove the first instance + slotMap.delete(leafSlot.toBigInt()); + slotMap.set(leafSlot.toBigInt(), obj); + } } + return Array.from(slotMap.values()); + } - - // There is an assumption based on how the block builder works, that the transactions - // provided here CANNOT revert, else they are not added to the block as the kernels - // will fail - // This will change whenever the vm is changed to be able to revert - public async applyPrivateStateUpdates(tx: Tx) { - const insertionPromises: Promise[] = []; - if (tx.data.forRollup) { - const {nullifiers, noteHashes} = 
tx.data.forRollup.end; - if (nullifiers) { - insertionPromises.push(this.merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, nullifiers.map(n => n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT)); - } - - if (noteHashes) { - insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); - } - } - - await Promise.all(insertionPromises); + // This is a temporary method, in our current kernel model, + // nested calls will trigger an entire execution reversion + // if any nested call reverts + temporaryDidCallOrNestedCallRevert(result: PublicExecutionResult) { + if (result.revertReason) { + return true; } - - /** - * Remove duplicates keeping the oldest item, where duplicates are defined by the leaf slot and side effect counter - * @param items - * @returns - */ - removeDuplicatesFromStart(items: PublicDataUpdateRequest[]) { - const slotMap: OrderedMap = new OrderedMap(); - - for (const obj of items) { - const { leafSlot, sideEffectCounter } = obj; - - if (!slotMap.has(leafSlot.toBigInt())) { - slotMap.set(leafSlot.toBigInt(), obj); - } - if (sideEffectCounter > slotMap.get(leafSlot.toBigInt())!.sideEffectCounter) { - // Remove the first instance - slotMap.delete(leafSlot.toBigInt()); - slotMap.set(leafSlot.toBigInt(), obj); - } - } - - return Array.from(slotMap.values()); - } - - - // This is a temporary method, in our current kernel model, - // nested calls will trigger an entire execution reversion - // if any nested call reverts - temporaryDidCallOrNestedCallRevert(result: PublicExecutionResult) { - if (result.revertReason) { - return true; - } - if (result.nestedExecutions.length > 0) { - for (const nested of result.nestedExecutions) { - if (this.temporaryDidCallOrNestedCallRevert(nested)) { - return true; - } - } + if (result.nestedExecutions.length > 0) { + for (const nested of result.nestedExecutions) { + if (this.temporaryDidCallOrNestedCallRevert(nested)) { + return true; } - return false; + } } + return false; + } } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 126a8d27ab5..330986992a2 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,4 +1,4 @@ -import { MerkleTreeId, type Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; +import { type Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; @@ -10,7 +10,6 @@ import { type ValidatorClientConfig } from './config.js'; import { ValidationService } from './duties/validation_service.js'; import { AttestationTimeoutError, - FailedToReExecuteTransactionsError, InvalidValidatorPrivateKeyError, PublicProcessorNotProvidedError, TransactionsNotAvailableError, @@ -18,7 +17,7 @@ import { } from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; -import { LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; +import { type LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; import { Timer } from '@aztec/foundation/timer'; export interface Validator { From 58861dc782180e72b72751838ef20615fdc2724e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 
2024 01:37:42 +0000 Subject: [PATCH 080/123] chore: restore tx validator --- yarn-project/simulator/src/public/index.ts | 2 +- .../src/public/light_public_processor.test.ts | 8 +- .../src/public/light_public_processor.ts | 68 ++++------- .../duties/light_public_processor_factory.ts | 46 ++++++++ yarn-project/validator-client/src/factory.ts | 22 ++-- .../validator-client/src/validator.test.ts | 9 +- .../validator-client/src/validator.ts | 110 +++++++++--------- 7 files changed, 151 insertions(+), 114 deletions(-) create mode 100644 yarn-project/validator-client/src/duties/light_public_processor_factory.ts diff --git a/yarn-project/simulator/src/public/index.ts b/yarn-project/simulator/src/public/index.ts index dc5f2e768b6..5bc58ac6e62 100644 --- a/yarn-project/simulator/src/public/index.ts +++ b/yarn-project/simulator/src/public/index.ts @@ -8,5 +8,5 @@ export * from './public_db_sources.js'; export * from './public_kernel.js'; export * from './public_kernel_circuit_simulator.js'; export { PublicProcessor, PublicProcessorFactory } from './public_processor.js'; -export { LightPublicProcessor, LightPublicProcessorFactory } from './light_public_processor.js'; +export { LightPublicProcessor } from './light_public_processor.js'; export { PublicSideEffectTrace } from './side_effect_trace.js'; diff --git a/yarn-project/simulator/src/public/light_public_processor.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts index b54d1c18d53..5fe782e1599 100644 --- a/yarn-project/simulator/src/public/light_public_processor.test.ts +++ b/yarn-project/simulator/src/public/light_public_processor.test.ts @@ -53,11 +53,17 @@ describe('public_processor', () => { beforeEach(() => { db = mock(); worldStateDB = mock(); + txValidator = mock>(); root = Buffer.alloc(32, 5); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); + + // Always return true for validation + txValidator.validateTxs.mockImplementation((txs: any[]) => { + return Promise.resolve([txs, []]); + }); }); describe('Light Public Processor', () => { @@ -94,8 +100,6 @@ describe('public_processor', () => { db.getPreviousValueIndex.mockResolvedValue({ index: 0n, alreadyPresent: true }); db.getLeafPreimage.mockResolvedValue(new PublicDataTreeLeafPreimage(new Fr(0), new Fr(0), new Fr(0), 0n)); - txValidator = mock>(); - processor = new LightPublicProcessor( db, worldStateDB, diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts index e8c2d93aa7c..da2637675d7 100644 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ b/yarn-project/simulator/src/public/light_public_processor.ts @@ -1,5 +1,5 @@ // A minimal version of the public processor - that does not have the fluff -import { MerkleTreeId, Tx, type TxValidator, type WorldStateSynchronizer } from '@aztec/circuit-types'; +import { MerkleTreeId, Tx, type TxValidator } from '@aztec/circuit-types'; import { Gas, type GlobalVariables, @@ -21,44 +21,11 @@ import { makeTuple } from '@aztec/foundation/array'; import { OrderedMap, padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { type TelemetryClient } from '@aztec/telemetry-client'; -import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { type PublicExecutionResult } from './execution.js'; import { PublicExecutor } from './executor.js'; -import { 
WorldStateDB } from './public_db_sources.js'; - -export class LightPublicProcessorFactory { - constructor( - private worldStateSynchronizer: WorldStateSynchronizer, - private contractDataSource: ContractDataSource, - private telemetryClient: TelemetryClient, - ) {} - - public async createWithSyncedState( - targetBlockNumber: number, - maybeHistoricalHeader: Header | undefined, - globalVariables: GlobalVariables, - txValidator: TxValidator, - ) { - // Make sure the world state synchronizer is synced - await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); - - // We will sync again whenever the block is created this could be an inefficiency - const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); - const historicalHeader = maybeHistoricalHeader ?? merkleTrees.getInitialHeader(); - const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); - - return new LightPublicProcessor( - merkleTrees, - worldStateDB, - globalVariables, - historicalHeader, - txValidator, - this.telemetryClient, - ); - } -} +import { type WorldStateDB } from './public_db_sources.js'; export class InvalidTransactionsFound extends Error { constructor() { @@ -90,7 +57,7 @@ export class LightPublicProcessor { private worldStateDB: WorldStateDB, private globalVariables: GlobalVariables, historicalHeader: Header, - txValidator: TxValidator, + private txValidator: TxValidator, telemetryClient: TelemetryClient, ) { this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); @@ -121,7 +88,7 @@ export class LightPublicProcessor { // TODO(md): do we need dummy transactions? txs = txs.map(tx => Tx.clone(tx)); - this.validateTransactions(txs); + await this.validateTransactions(txs); for (const tx of txs) { if (tx.hasPublicCalls()) { @@ -143,16 +110,27 @@ export class LightPublicProcessor { } } - validateTransactions(_txs: Tx[]) { - // TODO: Run tx validator checks - // const [_, invalidTxs] = await this.txValidator.validateTxs(txs); - // TODO(md): check the last valid block number is close - // If any of the transactions are invalid, we throw an error - // if (invalidTxs.length > 0) { - // console.log("invalid txs found"); - // throw new InvalidTransactionsFound(); + /** + * Validate the transactions + * + * If any of the transactions are invalid, we throw an error + * Halting block validation + * @param txs - The transactions to validate + */ + async validateTransactions(txs: Tx[]) { + const [_, invalidTxs] = await this.txValidator.validateTxs(txs); + + if (invalidTxs.length > 0) { + throw new InvalidTransactionsFound(); + } } + /** + * Execute the public calls and apply the state updates + * + * If the transaction has reverted in setup, we throw an error + * @param tx - The transaction to execute + */ async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { const publicExecutionResults = await this.executePublicCalls(tx); diff --git a/yarn-project/validator-client/src/duties/light_public_processor_factory.ts b/yarn-project/validator-client/src/duties/light_public_processor_factory.ts new file mode 100644 index 00000000000..6ca00465fb3 --- /dev/null +++ b/yarn-project/validator-client/src/duties/light_public_processor_factory.ts @@ -0,0 +1,46 @@ +import { type Tx, type TxValidator, type WorldStateSynchronizer } from '@aztec/circuit-types'; +import { type GlobalVariables, type Header } from '@aztec/circuits.js'; +import { AggregateTxValidator, DataTxValidator, DoubleSpendTxValidator, MetadataTxValidator } from '@aztec/p2p'; +import { LightPublicProcessor, 
WorldStateDB } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { type ContractDataSource } from '@aztec/types/contracts'; + +export class LightPublicProcessorFactory { + constructor( + private worldStateSynchronizer: WorldStateSynchronizer, + private contractDataSource: ContractDataSource, + private telemetryClient: TelemetryClient, + ) {} + + public async createWithSyncedState( + targetBlockNumber: number, + maybeHistoricalHeader: Header | undefined, + globalVariables: GlobalVariables, + ) { + // Make sure the world state synchronizer is synced + await this.worldStateSynchronizer.syncImmediate(targetBlockNumber); + + // We will sync again whenever the block is created this could be an inefficiency + const merkleTrees = await this.worldStateSynchronizer.ephemeralFork(); + const historicalHeader = maybeHistoricalHeader ?? merkleTrees.getInitialHeader(); + const worldStateDB = new WorldStateDB(merkleTrees, this.contractDataSource); + + // Define tx validators - we assume proof verification is performed over the mempool + // so we do not include it here + const txValidaors: TxValidator[] = [ + new DataTxValidator(), + new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber), + new DoubleSpendTxValidator(worldStateDB), + ]; + const txValidator: TxValidator = new AggregateTxValidator(...txValidaors); + + return new LightPublicProcessor( + merkleTrees, + worldStateDB, + globalVariables, + historicalHeader, + txValidator, + this.telemetryClient, + ); + } +} diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 62ef84449f3..47e5cac0666 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -1,15 +1,21 @@ +import { type ArchiveSource } from '@aztec/archiver'; +import { type WorldStateSynchronizer } from '@aztec/circuit-types'; import { type P2P } from '@aztec/p2p'; import { generatePrivateKey } from 'viem/accounts'; +import { type TelemetryClient } from '../../telemetry-client/src/telemetry.js'; import { type ValidatorClientConfig } from './config.js'; +import { LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { ValidatorClient } from './validator.js'; -import { LightPublicProcessorFactory } from '@aztec/simulator'; -import { type ArchiveSource } from '@aztec/archiver'; -import { type TelemetryClient } from '../../telemetry-client/src/telemetry.js'; -import { type WorldStateSynchronizer } from '@aztec/circuit-types'; -export async function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, worldStateSynchronizer: WorldStateSynchronizer, archiver: ArchiveSource, telemetry: TelemetryClient) { +export function createValidatorClient( + config: ValidatorClientConfig, + p2pClient: P2P, + worldStateSynchronizer: WorldStateSynchronizer, + archiver: ArchiveSource, + telemetry: TelemetryClient, +) { if (config.disableValidator) { return undefined; } @@ -19,11 +25,7 @@ export async function createValidatorClient(config: ValidatorClientConfig, p2pCl // We only craete a public processor factory if re-execution is enabled if (config.validatorReEx) { - const publicProcessorFactory = new LightPublicProcessorFactory( - worldStateSynchronizer, - archiver, - telemetry - ); + const publicProcessorFactory = new LightPublicProcessorFactory(worldStateSynchronizer, archiver, telemetry); return ValidatorClient.new(config, p2pClient, publicProcessorFactory); } diff --git 
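For context on the factory above: each validator in the aggregate reports its result as a [valid, invalid] pair, and that same tuple shape is what the light processor's validateTransactions consumes. The sketch below is illustrative only; MiniTxValidator and MiniAggregateTxValidator are made-up names, and the real DataTxValidator, MetadataTxValidator, DoubleSpendTxValidator and AggregateTxValidator in @aztec/p2p may differ in detail.

// Minimal sketch of the [valid, invalid] contract relied on by the aggregate validator above.
interface MiniTxValidator<T> {
  validateTxs(txs: T[]): Promise<[valid: T[], invalid: T[]]>;
}

class MiniAggregateTxValidator<T> implements MiniTxValidator<T> {
  private readonly validators: MiniTxValidator<T>[];

  constructor(...validators: MiniTxValidator<T>[]) {
    this.validators = validators;
  }

  async validateTxs(txs: T[]): Promise<[T[], T[]]> {
    let remaining = txs;
    const invalid: T[] = [];
    // Each rule only sees the txs that survived the previous one, so a tx
    // rejected anywhere lands in the invalid bucket exactly once.
    for (const validator of this.validators) {
      const [ok, bad] = await validator.validateTxs(remaining);
      remaining = ok;
      invalid.push(...bad);
    }
    return [remaining, invalid];
  }
}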
a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index aa93aa804b4..077c29ba261 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -6,7 +6,7 @@ import { makeHeader } from '@aztec/circuits.js/testing'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type P2P } from '@aztec/p2p'; -import { type LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; +import { type LightPublicProcessor } from '@aztec/simulator'; import { describe, expect, it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -14,6 +14,7 @@ import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from import { makeBlockProposal } from '../../circuit-types/src/p2p/mocks.js'; import { type ValidatorClientConfig } from './config.js'; +import { type LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { AttestationTimeoutError, InvalidValidatorPrivateKeyError, @@ -57,7 +58,7 @@ describe('ValidationService', () => { expect(() => ValidatorClient.new(config, p2pClient)).toThrow(InvalidValidatorPrivateKeyError); }); - it("Should throw an error if re-execution is enabled but no public processor is provided", () => { + it('Should throw an error if re-execution is enabled but no public processor is provided', () => { config.validatorReEx = true; expect(() => ValidatorClient.new(config, p2pClient)).toThrow(PublicProcessorNotProvidedError); }); @@ -94,7 +95,7 @@ describe('ValidationService', () => { ); }); - it("Should not return an attestation if re-execution fails", async () => { + it('Should not return an attestation if re-execution fails', async () => { const proposal = makeBlockProposal(); // mock the p2pClient.getTxStatus to return undefined for all transactions @@ -107,5 +108,5 @@ describe('ValidationService', () => { const attestation = await val.attestToProposal(proposal); expect(attestation).toBeUndefined(); - }) + }); }); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 330986992a2..b518b46f3a9 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,24 +1,25 @@ -import { type Tx, type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/circuit-types'; +import { type BlockAttestation, type BlockProposal, type Tx, type TxHash } from '@aztec/circuit-types'; import { type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; -import { DoubleSpendTxValidator, type P2P } from '@aztec/p2p'; +import { Timer } from '@aztec/foundation/timer'; +import { type P2P } from '@aztec/p2p'; +import { type LightPublicProcessor } from '@aztec/simulator'; import { type ValidatorClientConfig } from './config.js'; +import { type LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { ValidationService } from './duties/validation_service.js'; import { AttestationTimeoutError, InvalidValidatorPrivateKeyError, PublicProcessorNotProvidedError, + ReExStateMismatchError, TransactionsNotAvailableError, - ReExStateMismatchError } from './errors/validator.error.js'; import { type 
ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; -import { type LightPublicProcessor, type LightPublicProcessorFactory } from '@aztec/simulator'; -import { Timer } from '@aztec/foundation/timer'; export interface Validator { start(): Promise; @@ -32,7 +33,6 @@ export interface Validator { collectAttestations(proposal: BlockProposal, numberOfRequiredAttestations: number): Promise; } - /** Validator Client */ export class ValidatorClient implements Validator { @@ -49,25 +49,24 @@ export class ValidatorClient implements Validator { this.log.verbose('Initialized validator'); } - static new(config: ValidatorClientConfig, p2pClient: P2P, publicProcessorFactory?: LightPublicProcessorFactory | undefined) { + static new( + config: ValidatorClientConfig, + p2pClient: P2P, + publicProcessorFactory?: LightPublicProcessorFactory | undefined, + ) { if (!config.validatorPrivateKey) { throw new InvalidValidatorPrivateKeyError(); } //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 - if (config.validatorReEx && publicProcessorFactory === undefined){ + if (config.validatorReEx && publicProcessorFactory === undefined) { throw new PublicProcessorNotProvidedError(); } const privateKey = validatePrivateKey(config.validatorPrivateKey); const localKeyStore = new LocalKeyStore(privateKey); - const validator = new ValidatorClient( - localKeyStore, - p2pClient, - config, - publicProcessorFactory, - ); + const validator = new ValidatorClient(localKeyStore, p2pClient, config, publicProcessorFactory); validator.registerBlockProposalHandler(); return validator; } @@ -96,7 +95,6 @@ export class ValidatorClient implements Validator { this.log.verbose(`Re-executing transactions in the proposal before attesting`); await this.reExecuteTransactions(proposal); } - } catch (error: any) { if (error instanceof TransactionsNotAvailableError) { this.log.error(`Transactions not available, skipping attestation ${error.message}`); @@ -119,7 +117,7 @@ export class ValidatorClient implements Validator { * @param proposal - The proposal to re-execute */ async reExecuteTransactions(proposal: BlockProposal) { - const {header, txHashes} = proposal.payload; + const { header, txHashes } = proposal.payload; const txs = (await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx)))).filter(tx => tx !== undefined); @@ -129,28 +127,28 @@ export class ValidatorClient implements Validator { throw new TransactionsNotAvailableError(txHashes); } - // Assertion: This check will fail if re-execution is not enabled - if (!this.lightPublicProcessorFactory) { - throw new PublicProcessorNotProvidedError(); - } - - // TODO(md): make the transaction validator here - // TODO(md): for now breaking the rules and makeing the world state sync public on the factory - const txValidator = new DoubleSpendTxValidator((this.lightPublicProcessorFactory as any).worldStateSynchronizer); + // Assertion: This check will fail if re-execution is not enabled + if (!this.lightPublicProcessorFactory) { + throw new PublicProcessorNotProvidedError(); + } - // We sync the state to the previous block as the public processor will not process the current block - const targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; - this.log.verbose(`Re-ex: Syncing state to block number ${targetBlockNumber}`); + // We sync the state to the previous block as the public processor will not process the current block + const 
targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; + this.log.verbose(`Re-ex: Syncing state to block number ${targetBlockNumber}`); - const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState(targetBlockNumber, undefined, header.globalVariables, txValidator); + const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState( + targetBlockNumber, + undefined, + header.globalVariables, + ); - this.log.verbose(`Re-ex: Re-executing transactions`); - const timer = new Timer(); - await lightProcessor.process(txs as Tx[]); - this.log.verbose(`Re-ex: Re-execution complete ${timer.ms()}ms`); + this.log.verbose(`Re-ex: Re-executing transactions`); + const timer = new Timer(); + await lightProcessor.process(txs as Tx[]); + this.log.verbose(`Re-ex: Re-execution complete ${timer.ms()}ms`); - // This function will throw an error if state updates do not match - await this.checkReExecutedStateRoots(header, lightProcessor); + // This function will throw an error if state updates do not match + await this.checkReExecutedStateRoots(header, lightProcessor); } /** @@ -164,24 +162,32 @@ export class ValidatorClient implements Validator { * @param lightProcessor - The light processor to check */ private async checkReExecutedStateRoots(header: Header, lightProcessor: LightPublicProcessor) { - const [ - newNullifierTree, - newNoteHashTree, - newPublicDataTree, - ] = await lightProcessor.getTreeSnapshots(); - - if (!header.state.partial.nullifierTree.root.toBuffer().equals(newNullifierTree.root)) { - this.log.error(`Re-ex: nullifierTree does not match, ${header.state.partial.nullifierTree.root.toBuffer().toString('hex')} !== ${newNullifierTree.root.toString('hex')}`); - throw new ReExStateMismatchError(); - } - if (!header.state.partial.noteHashTree.root.toBuffer().equals(newNoteHashTree.root)) { - this.log.error(`Re-ex: noteHashTree does not match, ${header.state.partial.noteHashTree.root.toBuffer().toString('hex')} !== ${newNoteHashTree.root.toString('hex')}`); - throw new ReExStateMismatchError(); - } - if (!header.state.partial.publicDataTree.root.toBuffer().equals(newPublicDataTree.root)) { - this.log.error(`Re-ex: publicDataTree does not match, ${header.state.partial.publicDataTree.root.toBuffer().toString('hex')} !== ${newPublicDataTree.root.toString('hex')}`); - throw new ReExStateMismatchError(); - } + const [newNullifierTree, newNoteHashTree, newPublicDataTree] = await lightProcessor.getTreeSnapshots(); + + if (!header.state.partial.nullifierTree.root.toBuffer().equals(newNullifierTree.root)) { + this.log.error( + `Re-ex: nullifierTree does not match, ${header.state.partial.nullifierTree.root + .toBuffer() + .toString('hex')} !== ${newNullifierTree.root.toString('hex')}`, + ); + throw new ReExStateMismatchError(); + } + if (!header.state.partial.noteHashTree.root.toBuffer().equals(newNoteHashTree.root)) { + this.log.error( + `Re-ex: noteHashTree does not match, ${header.state.partial.noteHashTree.root + .toBuffer() + .toString('hex')} !== ${newNoteHashTree.root.toString('hex')}`, + ); + throw new ReExStateMismatchError(); + } + if (!header.state.partial.publicDataTree.root.toBuffer().equals(newPublicDataTree.root)) { + this.log.error( + `Re-ex: publicDataTree does not match, ${header.state.partial.publicDataTree.root + .toBuffer() + .toString('hex')} !== ${newPublicDataTree.root.toString('hex')}`, + ); + throw new ReExStateMismatchError(); + } } /** From 367ae2effe800124e8385558e5a2b3bf973c9d3e Mon Sep 17 00:00:00 2001 From: Maddiaa0 
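The root comparison above reduces to checking three pairs of 32-byte roots. Below is a stripped-down sketch of just that step using plain Node Buffers; the TreeRoots shape and assertRootsMatch name are placeholders, since the real check reads the proposal header's partial state reference and the snapshots returned by LightPublicProcessor.getTreeSnapshots, and throws ReExStateMismatchError rather than a generic Error.

// Stand-alone sketch of the re-execution root check, assuming roots are 32-byte Buffers.
type TreeRoots = { nullifierTree: Buffer; noteHashTree: Buffer; publicDataTree: Buffer };

function assertRootsMatch(expected: TreeRoots, recomputed: TreeRoots): void {
  for (const tree of ['nullifierTree', 'noteHashTree', 'publicDataTree'] as const) {
    if (!expected[tree].equals(recomputed[tree])) {
      // A single mismatching root is enough to refuse to attest to the proposal.
      throw new Error(
        `Re-ex: ${tree} does not match, ` +
          `${expected[tree].toString('hex')} !== ${recomputed[tree].toString('hex')}`,
      );
    }
  }
}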
<47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 01:37:54 +0000 Subject: [PATCH 081/123] fix: dependency cycle in circuits.js --- yarn-project/aztec-node/src/aztec-node/server.ts | 2 +- yarn-project/circuits.js/src/hash/hash.ts | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index a4fbbfeebe9..d509407c4bf 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -166,7 +166,7 @@ export class AztecNodeService implements AztecNode { const simulationProvider = await createSimulationProvider(config, log); - const validatorClient = await createValidatorClient(config, p2pClient, worldStateSynchronizer, archiver, telemetry); + const validatorClient = createValidatorClient(config, p2pClient, worldStateSynchronizer, archiver, telemetry); // now create the sequencer const sequencer = config.disableSequencer diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts index 6083676209d..eab83b28f0b 100644 --- a/yarn-project/circuits.js/src/hash/hash.ts +++ b/yarn-project/circuits.js/src/hash/hash.ts @@ -4,7 +4,8 @@ import { Fr } from '@aztec/foundation/fields'; import { numToUInt8, numToUInt16BE, numToUInt32BE } from '@aztec/foundation/serialize'; import { GeneratorIndex } from '../constants.gen.js'; -import { type ScopedL2ToL1Message, VerificationKey } from '../structs/index.js'; +import { type ScopedL2ToL1Message } from '../structs/l2_to_l1_message.js'; +import { VerificationKey } from '../structs/verification_key.js'; /** * Computes a hash of a given verification key. From 8da85d4264225e7041bc85cfdc5e0d635ba633a4 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 01:38:57 +0000 Subject: [PATCH 082/123] fix: yarn prepare --- yarn-project/validator-client/tsconfig.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/yarn-project/validator-client/tsconfig.json b/yarn-project/validator-client/tsconfig.json index a4198a7f0c2..f1ed8204d2e 100644 --- a/yarn-project/validator-client/tsconfig.json +++ b/yarn-project/validator-client/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../p2p" }, + { + "path": "../simulator" + }, { "path": "../types" } From 7fd3b4fbbdae2931dd24f89f67e4e3b1eb8cb027 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 01:56:26 +0000 Subject: [PATCH 083/123] nargo fmt --- .../noir-contracts/contracts/spam_contract/src/main.nr | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr index a8376de06f3..6e3f3e9601f 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr @@ -2,7 +2,6 @@ mod types; use dep::aztec::macros::aztec; - // A contract used for testing a random hodgepodge of small features from simulator and end-to-end tests. 
#[aztec] contract Spam { @@ -32,7 +31,12 @@ contract Spam { } #[private] - fn spam(nullifier_seed: Field, nullifier_count: u32, call_public: bool, call_recursive_spam: bool) { + fn spam( + nullifier_seed: Field, + nullifier_count: u32, + call_public: bool, + call_recursive_spam: bool + ) { let caller = context.msg_sender(); let caller_keys = get_public_keys(caller); let amount = U128::from_integer(1); From 2f54025affd048842b5c9fcab9ad6e407a02b87e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 3 Oct 2024 10:40:57 +0000 Subject: [PATCH 084/123] chore: revert header changes --- .../simulator/src/avm/avm_execution_environment.ts | 4 ++-- yarn-project/simulator/src/avm/avm_simulator.test.ts | 2 +- yarn-project/simulator/src/avm/fixtures/index.ts | 2 +- yarn-project/simulator/src/public/executor.ts | 8 ++++---- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/yarn-project/simulator/src/avm/avm_execution_environment.ts b/yarn-project/simulator/src/avm/avm_execution_environment.ts index e6b366e33de..9e067710bda 100644 --- a/yarn-project/simulator/src/avm/avm_execution_environment.ts +++ b/yarn-project/simulator/src/avm/avm_execution_environment.ts @@ -14,7 +14,7 @@ export class AvmExecutionEnvironment { public readonly functionSelector: FunctionSelector, // may be temporary (#7224) public readonly contractCallDepth: Fr, public readonly transactionFee: Fr, - public readonly historicalHeader: Header, + public readonly header: Header, public readonly globals: GlobalVariables, public readonly isStaticCall: boolean, public readonly isDelegateCall: boolean, @@ -35,7 +35,7 @@ export class AvmExecutionEnvironment { functionSelector, this.contractCallDepth.add(Fr.ONE), this.transactionFee, - this.historicalHeader, + this.header, this.globals, isStaticCall, isDelegateCall, diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 11c861f374a..a5dac0b3035 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -819,7 +819,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { /*nestedEnvironment=*/ expect.objectContaining({ sender: environment.address, // sender is top-level call contractCallDepth: new Fr(1), // top call is depth 0, nested is depth 1 - header: environment.historicalHeader, // just confirming that nested env looks roughly right + header: environment.header, // just confirming that nested env looks roughly right globals: environment.globals, // just confirming that nested env looks roughly right isStaticCall: isStaticCall, // TODO(7121): can't check calldata like this since it is modified on environment construction diff --git a/yarn-project/simulator/src/avm/fixtures/index.ts b/yarn-project/simulator/src/avm/fixtures/index.ts index 5956cf2e138..9d8e2183322 100644 --- a/yarn-project/simulator/src/avm/fixtures/index.ts +++ b/yarn-project/simulator/src/avm/fixtures/index.ts @@ -63,7 +63,7 @@ export function initExecutionEnvironment(overrides?: Partial Date: Thu, 3 Oct 2024 10:53:05 +0000 Subject: [PATCH 085/123] fmt --- yarn-project/simulator/src/public/executor.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 6aa5ce60651..f6e1e7a421e 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -21,11 +21,7 @@ 
import { PublicSideEffectTrace } from './side_effect_trace.js'; export class PublicExecutor { metrics: ExecutorMetrics; - constructor( - private readonly worldStateDB: WorldStateDB, - private readonly header: Header, - client: TelemetryClient, - ) { + constructor(private readonly worldStateDB: WorldStateDB, private readonly header: Header, client: TelemetryClient) { this.metrics = new ExecutorMetrics(client, 'PublicExecutor'); } From e1b8f888e9b64ca511c13b97337df0024cb681a6 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:41:22 +0000 Subject: [PATCH 086/123] remove processor --- .../contracts/spam_contract/src/main.nr | 87 +-- .../end-to-end/src/e2e_synching.test.ts | 8 +- .../foundation/src/collection/index.ts | 3 +- .../src/collection/ordered_map.test.ts | 101 --- .../foundation/src/collection/ordered_map.ts | 49 -- yarn-project/simulator/src/public/executor.ts | 1 - .../src/public/light_public_processor.test.ts | 697 ------------------ .../src/public/light_public_processor.ts | 458 ------------ 8 files changed, 32 insertions(+), 1372 deletions(-) delete mode 100644 yarn-project/foundation/src/collection/ordered_map.test.ts delete mode 100644 yarn-project/foundation/src/collection/ordered_map.ts delete mode 100644 yarn-project/simulator/src/public/light_public_processor.test.ts delete mode 100644 yarn-project/simulator/src/public/light_public_processor.ts diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr index 6e3f3e9601f..3523cc6e3f7 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr @@ -6,23 +6,21 @@ use dep::aztec::macros::aztec; #[aztec] contract Spam { - global MAX_EXTERNAL_CALL_SPAM = 4; - use dep::aztec::{ - prelude::{Map, AztecAddress, PublicMutable}, - encrypted_logs::{encrypted_note_emission::encode_and_encrypt_note_unconstrained}, + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_unconstrained, keys::getters::get_public_keys, + macros::{functions::{internal, private, public}, storage::storage}, + prelude::{AztecAddress, Map, PublicMutable}, protocol_types::{ - hash::poseidon2_hash_with_separator, - constants::{ - MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, GENERATOR_INDEX__NOTE_NULLIFIER, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL - } - }, - macros::{storage::storage, functions::{private, public, internal}} + constants::{ + GENERATOR_INDEX__NOTE_NULLIFIER, MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + }, + hash::poseidon2_hash_with_separator, + }, }; - use crate::types::{token_note::TokenNote, balance_set::BalanceSet}; + use crate::types::{balance_set::BalanceSet, token_note::TokenNote}; #[storage] struct Storage { @@ -31,43 +29,36 @@ contract Spam { } #[private] - fn spam( - nullifier_seed: Field, - nullifier_count: u32, - call_public: bool, - call_recursive_spam: bool - ) { + fn spam(nullifier_seed: Field, nullifier_count: u32, call_public: bool) { let caller = context.msg_sender(); - let caller_keys = get_public_keys(caller); + let caller_ovpk_m = get_public_keys(caller).ovpk_m; let amount = U128::from_integer(1); for _ in 0..MAX_NOTE_HASHES_PER_CALL { - storage.balances.at(caller).add(caller_keys.npk_m, U128::from_integer(amount)).emit( - 
encode_and_encrypt_note_unconstrained(&mut context, caller_keys.ovpk_m, caller_keys.ivpk_m, caller) + storage.balances.at(caller).add(caller, U128::from_integer(amount)).emit( + encode_and_encrypt_note_unconstrained(&mut context, caller_ovpk_m, caller, caller), ); } for i in 0..MAX_NULLIFIERS_PER_CALL { if (i < nullifier_count) { - context.push_nullifier( - poseidon2_hash_with_separator( - [nullifier_seed, i as Field], - GENERATOR_INDEX__NOTE_NULLIFIER as Field - ) - ); + context.push_nullifier(poseidon2_hash_with_separator( + [nullifier_seed, i as Field], + GENERATOR_INDEX__NOTE_NULLIFIER as Field, + )); } } if (call_public) { - Spam::at(context.this_address()).public_spam(0, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL).enqueue(&mut context); - Spam::at(context.this_address()).public_spam( - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX - ).enqueue(&mut context); - } - - if (call_recursive_spam) { - Spam::at(context.this_address()).public_spam_with_external_call(0, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL).enqueue(&mut context); + Spam::at(context.this_address()) + .public_spam(0, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + .enqueue(&mut context); + Spam::at(context.this_address()) + .public_spam( + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + ) + .enqueue(&mut context); } } @@ -80,28 +71,4 @@ contract Spam { storage.public_balances.at(i as Field).write(prev + one); } } - - // TODO(md): add something to the public spam such that we can test further enqueued calls - #[public] - #[internal] - fn public_spam_with_external_call(start: u32, end: u32) { - let one = U128::from_integer(1); - for i in start..end { - let prev = storage.public_balances.at(i as Field).read(); - storage.public_balances.at(i as Field).write(prev + one); - } - - Spam::at(context.this_address()).recursive_spam(0).call(&mut context); - } - - #[public] - fn recursive_spam(counter: u32) { - let one = U128::from_integer(1); - if (counter < MAX_EXTERNAL_CALL_SPAM) { - let prev = storage.public_balances.at(counter as Field).read(); - storage.public_balances.at(counter as Field).write(prev + one); - - Spam::at(context.this_address()).recursive_spam(counter + 1).call(&mut context); - } - } } diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index e70b6dbddfe..ff524590f22 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -276,10 +276,10 @@ class TestVariant { const txs = []; for (let i = 0; i < this.txCount; i++) { const batch = new BatchCall(this.wallets[i], [ - this.spam.methods.spam(this.seed, 16, false, false).request(), - this.spam.methods.spam(this.seed + 16n, 16, false, false).request(), - this.spam.methods.spam(this.seed + 32n, 16, false, false).request(), - this.spam.methods.spam(this.seed + 48n, 15, true, false).request(), + this.spam.methods.spam(this.seed, 16, false).request(), + this.spam.methods.spam(this.seed + 16n, 16, false).request(), + this.spam.methods.spam(this.seed + 32n, 16, false).request(), + this.spam.methods.spam(this.seed + 48n, 15, true).request(), ]); this.seed += 100n; diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index d315cf2142f..63a52414e69 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1,3 +1,2 @@ export * from './array.js'; -export * from './object.js'; 
-export * from './ordered_map.js'; +export * from './object.js'; \ No newline at end of file diff --git a/yarn-project/foundation/src/collection/ordered_map.test.ts b/yarn-project/foundation/src/collection/ordered_map.test.ts deleted file mode 100644 index 611dd6534ba..00000000000 --- a/yarn-project/foundation/src/collection/ordered_map.test.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { OrderedMap } from './ordered_map.js'; - -describe('OrderedMap', () => { - let orderedMap: OrderedMap; - - beforeEach(() => { - orderedMap = new OrderedMap(); - }); - - it('set should add new key-value pairs', () => { - orderedMap.set('a', 1); - expect(orderedMap.get('a')).toBe(1); - expect(orderedMap.has('a')).toBe(true); - }); - - it('set should update existing keys and move them to the end', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('a', 3); - expect(Array.from(orderedMap.values())).toEqual([2, 3]); - }); - - it('get should retrieve values for existing keys', () => { - orderedMap.set('a', 1); - expect(orderedMap.get('a')).toBe(1); - }); - - it('get should return undefined for non-existent keys', () => { - expect(orderedMap.get('a')).toBeUndefined(); - }); - - it('has should return true for existing keys', () => { - orderedMap.set('a', 1); - expect(orderedMap.has('a')).toBe(true); - }); - - it('has should return false for non-existent keys', () => { - expect(orderedMap.has('a')).toBe(false); - }); - - it('delete should remove existing keys and return true', () => { - orderedMap.set('a', 1); - expect(orderedMap.delete('a')).toBe(true); - expect(orderedMap.has('a')).toBe(false); - expect(Array.from(orderedMap.values())).toEqual([]); - }); - - test('delete should return false for non-existent keys', () => { - expect(orderedMap.delete('a')).toBe(false); - }); - - it('values should return an array of values in insertion order', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('c', 3); - expect(Array.from(orderedMap.values())).toEqual([1, 2, 3]); - }); - - it('values should reflect the updated order after re-insertion', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('a', 3); - expect(Array.from(orderedMap.values())).toEqual([2, 3]); - }); - - it('iterator should yield key-value pairs in insertion order', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('c', 3); - expect(Array.from(orderedMap)).toEqual([ - ['a', 1], - ['b', 2], - ['c', 3], - ]); - }); - - it('iterator should reflect the updated order after re-insertion', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('a', 3); - expect(Array.from(orderedMap)).toEqual([ - ['b', 2], - ['a', 3], - ]); - }); - - it('multiple operations should maintain correct order and values', () => { - orderedMap.set('a', 1); - orderedMap.set('b', 2); - orderedMap.set('c', 3); - orderedMap.delete('b'); - orderedMap.set('d', 4); - orderedMap.set('a', 5); - expect(Array.from(orderedMap.values())).toEqual([3, 4, 5]); - expect(Array.from(orderedMap)).toEqual([ - ['c', 3], - ['d', 4], - ['a', 5], - ]); - }); -}); diff --git a/yarn-project/foundation/src/collection/ordered_map.ts b/yarn-project/foundation/src/collection/ordered_map.ts deleted file mode 100644 index 8cb7fef1891..00000000000 --- a/yarn-project/foundation/src/collection/ordered_map.ts +++ /dev/null @@ -1,49 +0,0 @@ -// This exists for public state squashing -// -// It has advantages over an traditional map as when we delete an item, -// we can insert to the back of the map -// 
This filtering is inefficient and a bottleneck over large lists -// Additionally getting values implements a copy -export class OrderedMap { - map = new Map(); - keys: K[] = []; - - constructor() {} - - set(key: K, value: V) { - if (this.map.has(key)) { - // Remove the key from the keys array - this.keys = this.keys.filter(k => k !== key); - } - // Add the key to the end of the keys array - this.keys.push(key); - // Set the value in the map - this.map.set(key, value); - } - - get(key: K) { - return this.map.get(key); - } - - has(key: K) { - return this.map.has(key); - } - - delete(key: K) { - if (this.map.delete(key)) { - this.keys = this.keys.filter(k => k !== key); - return true; - } - return false; - } - - values() { - return this.keys.map(key => this.map.get(key)!); - } - - *[Symbol.iterator]() { - for (const key of this.keys) { - yield [key, this.map.get(key)]; - } - } -} diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index f6e1e7a421e..52f664df436 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -43,7 +43,6 @@ export class PublicExecutor { globalVariables: GlobalVariables, availableGas: Gas, _txContext: TxContext, - // TODO(md): this will be shared? Why do we need to pass it everywhere? pendingSiloedNullifiers: Nullifier[], transactionFee: Fr = Fr.ZERO, startSideEffectCounter: number = 0, diff --git a/yarn-project/simulator/src/public/light_public_processor.test.ts b/yarn-project/simulator/src/public/light_public_processor.test.ts deleted file mode 100644 index 5fe782e1599..00000000000 --- a/yarn-project/simulator/src/public/light_public_processor.test.ts +++ /dev/null @@ -1,697 +0,0 @@ -import { - type PublicExecutionRequest, - SimulationError, - type TreeInfo, - type TxValidator, - mockTx, -} from '@aztec/circuit-types'; -import { - AppendOnlyTreeSnapshot, - AztecAddress, - ContractStorageRead, - ContractStorageUpdateRequest, - Fr, - Gas, - GasFees, - GasSettings, - GlobalVariables, - Header, - MAX_NOTE_HASHES_PER_TX, - MAX_NULLIFIERS_PER_TX, - PUBLIC_DATA_TREE_HEIGHT, - PartialStateReference, - PublicAccumulatedDataBuilder, - PublicDataTreeLeafPreimage, - PublicDataUpdateRequest, - StateReference, -} from '@aztec/circuits.js'; -import { fr, makeSelector } from '@aztec/circuits.js/testing'; -import { padArrayEnd } from '@aztec/foundation/collection'; -import { type FieldsOf } from '@aztec/foundation/types'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; -import { LightPublicProcessor, type PublicExecutionResult, type WorldStateDB } from '@aztec/simulator'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type MerkleTreeOperations } from '@aztec/world-state'; - -import { jest } from '@jest/globals'; -import { type MockProxy, mock } from 'jest-mock-extended'; - -import { PublicExecutionResultBuilder, makeFunctionCall } from '../mocks/fixtures.js'; -import { type NewStateUpdates } from './light_public_processor.js'; - -describe('public_processor', () => { - let db: MockProxy; - let worldStateDB: MockProxy; - let txValidator: MockProxy>; - let publicExecutorSpy: any; - - let root: Buffer; - - let processor: LightPublicProcessor; - - beforeEach(() => { - db = mock(); - worldStateDB = mock(); - txValidator = mock>(); - - root = Buffer.alloc(32, 5); - - db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); - 
worldStateDB.storageRead.mockResolvedValue(Fr.ZERO); - - // Always return true for validation - txValidator.validateTxs.mockImplementation((txs: any[]) => { - return Promise.resolve([txs, []]); - }); - }); - - describe('Light Public Processor', () => { - let publicDataTree: AppendOnlyTree; - - beforeAll(async () => { - publicDataTree = await newTree( - StandardTree, - openTmpStore(), - new Poseidon(), - 'PublicData', - Fr, - PUBLIC_DATA_TREE_HEIGHT, - 1, // Add a default low leaf for the public data hints to be proved against. - ); - }); - - beforeEach(() => { - const snap = new AppendOnlyTreeSnapshot( - Fr.fromBuffer(publicDataTree.getRoot(true)), - Number(publicDataTree.getNumLeaves(true)), - ); - - const header = Header.empty(); - const stateReference = new StateReference( - header.state.l1ToL2MessageTree, - new PartialStateReference(header.state.partial.noteHashTree, header.state.partial.nullifierTree, snap), - ); - // Clone the whole state because somewhere down the line (AbstractPhaseManager) the public data root is modified in the referenced header directly :/ - header.state = StateReference.fromBuffer(stateReference.toBuffer()); - - db.getStateReference.mockResolvedValue(stateReference); - db.getSiblingPath.mockResolvedValue(publicDataTree.getSiblingPath(0n, false)); - db.getPreviousValueIndex.mockResolvedValue({ index: 0n, alreadyPresent: true }); - db.getLeafPreimage.mockResolvedValue(new PublicDataTreeLeafPreimage(new Fr(0), new Fr(0), new Fr(0), 0n)); - - processor = new LightPublicProcessor( - db, - worldStateDB, - GlobalVariables.from({ ...GlobalVariables.empty(), gasFees: GasFees.default() }), - header, - txValidator, - new NoopTelemetryClient(), - ); - - publicExecutorSpy = jest.spyOn(processor.publicExecutor, 'simulate'); - - publicExecutorSpy.mockImplementation((req: PublicExecutionRequest) => { - const result = PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: req as PublicExecutionRequest, - }).build(); - return Promise.resolve(result); - }); - }); - - it('rolls back app logic db updates on failed public execution, but persists setup', async function () { - const tx = mockTx(1, { - hasLogs: true, - numberOfNonRevertiblePublicCallRequests: 1, - numberOfRevertiblePublicCallRequests: 1, - hasPublicTeardownCallRequest: true, - }); - - const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); - const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); - const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); - const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; - - const nestedContractAddress = AztecAddress.fromBigInt(112233n); - const contractSlotA = fr(0x100); - const contractSlotB = fr(0x150); - const contractSlotC = fr(0x200); - const contractSlotD = fr(0x250); - const contractSlotE = fr(0x300); - const contractSlotF = fr(0x350); - - let simulatorCallCount = 0; - const simulatorResults: PublicExecutionResult[] = [ - // Setup - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: nonRevertibleRequests[0], - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], - }).build(), - - // App Logic - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: revertibleRequests[0], - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: revertibleRequests[0].callContext.storageContractAddress, - tx: 
makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13), - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 14), - new ContractStorageUpdateRequest(contractSlotC, fr(0x200), 15), - ], - }).build(), - PublicExecutionResultBuilder.fromFunctionCall({ - from: revertibleRequests[0].contractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - revertReason: new SimulationError('Simulation Failed', []), - }).build(), - ], - }).build(), - - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 16), - new ContractStorageUpdateRequest(contractSlotD, fr(0x251), 17), - new ContractStorageUpdateRequest(contractSlotE, fr(0x301), 18), - new ContractStorageUpdateRequest(contractSlotF, fr(0x351), 19), - ], - }).build(teardownResultSettings), - ], - }).build(teardownResultSettings), - ]; - - jest.spyOn((processor as any).publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - return Promise.resolve(simulatorResults[simulatorCallCount++]); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - await processor.process([tx]); - - expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); - // We should not need to roll back to the checkpoint, the nested call reverting should not - // mean that the parent call should revert! 
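The call-count assertions around this point pin down the checkpoint discipline the (now removed) processor followed: a revert during setup rolls back to the last commit and fails the whole transaction, while a revert after setup rolls back only to the post-setup checkpoint and the transaction is still included. A simplified sketch of that shape is below, assuming a hypothetical stand-in db interface with just the four calls named in these tests; the real WorldStateDB and phase handling are richer (for example, app logic and teardown each roll back independently, which is why a later test expects two rollbacks).

// Sketch only: CheckpointedDb is a stand-in, not the real WorldStateDB interface.
interface CheckpointedDb {
  checkpoint(): Promise<void>;
  rollbackToCheckpoint(): Promise<void>;
  commit(): Promise<void>;
  rollbackToCommit(): Promise<void>;
}

async function runTxPhases(
  db: CheckpointedDb,
  setup: () => Promise<void>,
  revertiblePhases: () => Promise<void>,
): Promise<void> {
  try {
    // A revert during setup invalidates the whole tx.
    await setup();
  } catch (err) {
    await db.rollbackToCommit();
    throw err;
  }
  // Side effects from setup are kept even if later phases revert.
  await db.checkpoint();
  try {
    await revertiblePhases();
  } catch {
    // Revertible failures unwind to the post-setup checkpoint; the tx is still included.
    await db.rollbackToCheckpoint();
  }
  await db.commit();
}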
- expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); - expect(worldStateDB.commit).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - }); - - it('fails a transaction that reverts in setup', async function () { - const tx = mockTx(1, { - numberOfNonRevertiblePublicCallRequests: 1, - numberOfRevertiblePublicCallRequests: 1, - hasPublicTeardownCallRequest: true, - }); - - const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); - const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); - const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - const nestedContractAddress = AztecAddress.fromBigInt(112233n); - const contractSlotA = fr(0x100); - const contractSlotB = fr(0x150); - const contractSlotC = fr(0x200); - - let simulatorCallCount = 0; - const simulatorResults: PublicExecutionResult[] = [ - // Setup - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: nonRevertibleRequests[0], - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: nonRevertibleRequests[0].callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), - ], - }).build(), - PublicExecutionResultBuilder.fromFunctionCall({ - from: nonRevertibleRequests[0].callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - revertReason: new SimulationError('Simulation Failed', []), - }).build(), - ], - }).build(), - - // App Logic - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: revertibleRequests[0], - }).build(), - - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], - }).build(), - ], - }).build(), - ]; - - jest.spyOn((processor as any).publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - return Promise.resolve(simulatorResults[simulatorCallCount++]); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - await expect(async () => { - await processor.process([tx]); - }).rejects.toThrow('Reverted in setup'); - - expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(1); - - expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(0); - expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); - expect(worldStateDB.commit).toHaveBeenCalledTimes(0); - expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1); - }); - - it('includes a transaction that reverts in teardown', async function () { - const tx = mockTx(1, { - hasLogs: true, - numberOfNonRevertiblePublicCallRequests: 1, - numberOfRevertiblePublicCallRequests: 1, - hasPublicTeardownCallRequest: true, - }); - - const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); - const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); - const 
teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); - const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; - - const nestedContractAddress = AztecAddress.fromBigInt(112233n); - const contractSlotA = fr(0x100); - const contractSlotB = fr(0x150); - const contractSlotC = fr(0x200); - - let simulatorCallCount = 0; - const simulatorResults: PublicExecutionResult[] = [ - // Setup - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: nonRevertibleRequests[0], - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: nonRevertibleRequests[0].callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), - ], - }).build(), - ], - }).build(), - - // App Logic - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: revertibleRequests[0], - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 15), - ], - }).build(), - - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], - }).build(teardownResultSettings), - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 17)], - revertReason: new SimulationError('Simulation Failed', []), - }).build(teardownResultSettings), - ], - }).build(teardownResultSettings), - ]; - - jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - return Promise.resolve(simulatorResults[simulatorCallCount++]); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - await processor.process([tx]); - - expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(1); - expect(worldStateDB.commit).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - }); - - it('includes a transaction that reverts in app logic and teardown', async function () { - const tx = mockTx(1, { - hasLogs: true, - numberOfNonRevertiblePublicCallRequests: 1, - numberOfRevertiblePublicCallRequests: 1, - hasPublicTeardownCallRequest: true, - }); - - const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); - const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); - const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); - const 
teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; - - const nestedContractAddress = AztecAddress.fromBigInt(112233n); - const contractSlotA = fr(0x100); - const contractSlotB = fr(0x150); - const contractSlotC = fr(0x200); - - let simulatorCallCount = 0; - const simulatorResults: PublicExecutionResult[] = [ - // Setup - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: nonRevertibleRequests[0], - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11)], - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: nonRevertibleRequests[0].callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), - ], - }).build(), - ], - }).build(), - - // App Logic - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: revertibleRequests[0], - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 15), - ], - revertReason: new SimulationError('Simulation Failed', []), - }).build(), - - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], - }).build(teardownResultSettings), - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [new ContractStorageUpdateRequest(contractSlotC, fr(0x202), 16)], - revertReason: new SimulationError('Simulation Failed', []), - }).build(teardownResultSettings), - ], - }).build(teardownResultSettings), - ]; - - jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - return Promise.resolve(simulatorResults[simulatorCallCount++]); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - const stateUpdateSpy = jest.spyOn(processor as any, 'writeStateUpdates'); - - await processor.process([tx]); - - expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(2); - expect(worldStateDB.commit).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - - const nestContractAddress = simulatorResults[0].nestedExecutions[0].executionRequest.contractAddress; - const expectedPublicDataWrites = [ - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nonRevertibleRequests[0].callContext.storageContractAddress, - new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11), - ), - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestContractAddress, - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12), - ), - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestContractAddress, - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 13), - ), - ]; - // Tx hash + state updates 
- const expectedStateUpdatesOpject: NewStateUpdates = { - nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), - noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), - publicDataWrites: expectedPublicDataWrites, - }; - expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); - }); - - it('runs a tx with all phases', async function () { - const tx = mockTx(1, { - numberOfNonRevertiblePublicCallRequests: 1, - numberOfRevertiblePublicCallRequests: 1, - hasPublicTeardownCallRequest: true, - }); - - const nonRevertibleRequests = tx.getNonRevertiblePublicExecutionRequests(); - const revertibleRequests = tx.getRevertiblePublicExecutionRequests(); - const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - // TODO(md): gas - const gasLimits = Gas.from({ l2Gas: 1e9, daGas: 1e9 }); - const teardownGas = Gas.from({ l2Gas: 1e7, daGas: 1e7 }); - tx.data.constants.txContext.gasSettings = GasSettings.from({ - gasLimits: gasLimits, - teardownGasLimits: teardownGas, - inclusionFee: new Fr(1e4), - maxFeesPerGas: { feePerDaGas: new Fr(10), feePerL2Gas: new Fr(10) }, - }); - - // Private kernel tail to public pushes teardown gas allocation into revertible gas used - tx.data.forPublic!.end = PublicAccumulatedDataBuilder.fromPublicAccumulatedData(tx.data.forPublic!.end) - .withGasUsed(teardownGas) - .build(); - tx.data.forPublic!.endNonRevertibleData = PublicAccumulatedDataBuilder.fromPublicAccumulatedData( - tx.data.forPublic!.endNonRevertibleData, - ) - .withGasUsed(Gas.empty()) - .build(); - - const nestedContractAddress = revertibleRequests[0].callContext.storageContractAddress; - const contractSlotA = fr(0x100); - const contractSlotB = fr(0x150); - const contractSlotC = fr(0x200); - - let simulatorCallCount = 0; - - const initialGas = gasLimits.sub(teardownGas); - const setupGasUsed = Gas.from({ l2Gas: 1e6 }); - const appGasUsed = Gas.from({ l2Gas: 2e6, daGas: 2e6 }); - const teardownGasUsed = Gas.from({ l2Gas: 3e6, daGas: 3e6 }); - const afterSetupGas = initialGas.sub(setupGasUsed); - const afterAppGas = afterSetupGas.sub(appGasUsed); - const afterTeardownGas = teardownGas.sub(teardownGasUsed); - - // Inclusion fee plus block gas fees times total gas used - const expectedTxFee = 1e4 + (1e7 + 1e6 + 2e6) * 1 + (1e7 + 2e6) * 1; - const transactionFee = new Fr(expectedTxFee); - - const simulatorResults: PublicExecutionResult[] = [ - // Setup - PublicExecutionResultBuilder.fromPublicExecutionRequest({ request: nonRevertibleRequests[0] }).build({ - startGasLeft: initialGas, - endGasLeft: afterSetupGas, - }), - - // App Logic - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: revertibleRequests[0], - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 10), - new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 11), - ], - contractStorageReads: [new ContractStorageRead(contractSlotA, fr(0x100), 19)], - }).build({ - startGasLeft: afterSetupGas, - endGasLeft: afterAppGas, - }), - - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [ - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), - ], - contractStorageReads: 
[new ContractStorageRead(contractSlotA, fr(0x102), 15)], - }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), - PublicExecutionResultBuilder.fromFunctionCall({ - from: teardownRequest.callContext.storageContractAddress, - tx: makeFunctionCall('', nestedContractAddress, makeSelector(5)), - contractStorageUpdateRequests: [ - new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13), - new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), - ], - contractStorageReads: [new ContractStorageRead(contractSlotA, fr(0x101), 12)], - }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), - ], - }).build({ - startGasLeft: teardownGas, - endGasLeft: afterTeardownGas, - transactionFee, - }), - ]; - - jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - const result = simulatorResults[simulatorCallCount++]; - return Promise.resolve(result); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - const stateUpdateSpy = jest.spyOn(processor as any, 'writeStateUpdates'); - - await processor.process([tx]); - - const expectedSimulateCall = (availableGas: Partial>, _txFee: number) => [ - expect.anything(), // PublicExecution - expect.anything(), // GlobalVariables - Gas.from(availableGas), - expect.anything(), // TxContext - expect.anything(), // pendingNullifiers - new Fr(0), - // new Fr(txFee), - expect.anything(), // SideEffectCounter - ]; - - expect(processor.publicExecutor.simulate).toHaveBeenCalledTimes(3); - expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith(1, ...expectedSimulateCall(initialGas, 0)); - expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); - expect(processor.publicExecutor.simulate).toHaveBeenNthCalledWith( - 3, - ...expectedSimulateCall(teardownGas, expectedTxFee), - ); - - expect(worldStateDB.checkpoint).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); - expect(worldStateDB.commit).toHaveBeenCalledTimes(1); - expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); - - const expectedPublicDataWrites = [ - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress, - new ContractStorageUpdateRequest(contractSlotA, fr(0x103), 16), - ), - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress, - new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 17), - ), - PublicDataUpdateRequest.fromContractStorageUpdateRequest( - nestedContractAddress, - new ContractStorageUpdateRequest(contractSlotB, fr(0x152), 14), - ), - ]; - // Tx hash + state updates - const expectedStateUpdatesOpject: NewStateUpdates = { - nullifiers: padArrayEnd([tx.data.getNonEmptyNullifiers()[0]], Fr.ZERO, MAX_NULLIFIERS_PER_TX), - noteHashes: padArrayEnd([], Fr.ZERO, MAX_NOTE_HASHES_PER_TX), - publicDataWrites: expectedPublicDataWrites, - }; - expect(stateUpdateSpy).toHaveBeenCalledWith(expectedStateUpdatesOpject); - }); - - it('runs a tx with only teardown', async function () { - const tx = mockTx(1, { - numberOfNonRevertiblePublicCallRequests: 0, - numberOfRevertiblePublicCallRequests: 0, - hasPublicTeardownCallRequest: true, - }); - - const teardownRequest = tx.getPublicTeardownExecutionRequest()!; - - const gasLimits = Gas.from({ l2Gas: 1e9, daGas: 1e9 }); - const teardownGas = Gas.from({ l2Gas: 1e7, daGas: 1e7 }); 
- tx.data.constants.txContext.gasSettings = GasSettings.from({ - gasLimits: gasLimits, - teardownGasLimits: teardownGas, - inclusionFee: new Fr(1e4), - maxFeesPerGas: { feePerDaGas: new Fr(10), feePerL2Gas: new Fr(10) }, - }); - - // Private kernel tail to public pushes teardown gas allocation into revertible gas used - tx.data.forPublic!.end = PublicAccumulatedDataBuilder.fromPublicAccumulatedData(tx.data.forPublic!.end) - .withGasUsed(teardownGas) - .build(); - tx.data.forPublic!.endNonRevertibleData = PublicAccumulatedDataBuilder.fromPublicAccumulatedData( - tx.data.forPublic!.endNonRevertibleData, - ) - .withGasUsed(Gas.empty()) - .build(); - - let simulatorCallCount = 0; - const txOverhead = 1e4; - const expectedTxFee = txOverhead + teardownGas.l2Gas * 1 + teardownGas.daGas * 1; - const transactionFee = new Fr(expectedTxFee); - const teardownGasUsed = Gas.from({ l2Gas: 1e6, daGas: 1e6 }); - - const simulatorResults: PublicExecutionResult[] = [ - // Teardown - PublicExecutionResultBuilder.fromPublicExecutionRequest({ - request: teardownRequest, - nestedExecutions: [], - }).build({ - startGasLeft: teardownGas, - endGasLeft: teardownGas.sub(teardownGasUsed), - transactionFee, - }), - ]; - - jest.spyOn(processor.publicExecutor, 'simulate').mockImplementation(execution => { - if (simulatorCallCount < simulatorResults.length) { - const result = simulatorResults[simulatorCallCount++]; - return Promise.resolve(result); - } else { - throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); - } - }); - - await processor.process([tx]); - }); - }); -}); diff --git a/yarn-project/simulator/src/public/light_public_processor.ts b/yarn-project/simulator/src/public/light_public_processor.ts deleted file mode 100644 index da2637675d7..00000000000 --- a/yarn-project/simulator/src/public/light_public_processor.ts +++ /dev/null @@ -1,458 +0,0 @@ -// A minimal version of the public processor - that does not have the fluff -import { MerkleTreeId, Tx, type TxValidator } from '@aztec/circuit-types'; -import { - Gas, - type GlobalVariables, - type Header, - MAX_NOTE_HASHES_PER_TX, - MAX_NULLIFIERS_PER_TX, - MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - NULLIFIER_SUBTREE_HEIGHT, - type Nullifier, - PUBLIC_DATA_SUBTREE_HEIGHT, - PublicDataTreeLeaf, - PublicDataUpdateRequest, - type ScopedNoteHash, - type ScopedNullifier, - getNonEmptyItems, -} from '@aztec/circuits.js'; -import { siloNoteHash, siloNullifier } from '@aztec/circuits.js/hash'; -import { makeTuple } from '@aztec/foundation/array'; -import { OrderedMap, padArrayEnd } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/fields'; -import { type TelemetryClient } from '@aztec/telemetry-client'; -import { type MerkleTreeOperations } from '@aztec/world-state'; - -import { type PublicExecutionResult } from './execution.js'; -import { PublicExecutor } from './executor.js'; -import { type WorldStateDB } from './public_db_sources.js'; - -export class InvalidTransactionsFound extends Error { - constructor() { - super('Double spend tx found'); - } -} - -export type NewStateUpdates = { - nullifiers: Fr[]; - noteHashes: Fr[]; - publicDataWrites: PublicDataUpdateRequest[]; -}; - -/** - * A variant of the public processor that does not run the kernel circuits - * - * TODO(make issues): - * - Gas accounting is not complete - https://github.com/AztecProtocol/aztec-packages/issues/8962 - * - Calculating the state root (archive) is not complete - https://github.com/AztecProtocol/aztec-packages/issues/8961 
- */ -export class LightPublicProcessor { - public publicExecutor: PublicExecutor; - - // State - private pendingNullifiers: Nullifier[]; - - constructor( - private merkleTrees: MerkleTreeOperations, - private worldStateDB: WorldStateDB, - private globalVariables: GlobalVariables, - historicalHeader: Header, - private txValidator: TxValidator, - telemetryClient: TelemetryClient, - ) { - this.publicExecutor = new PublicExecutor(worldStateDB, historicalHeader, telemetryClient); - this.pendingNullifiers = []; - } - - addNullifiers(nullifiers: Nullifier[]) { - this.pendingNullifiers.push(...nullifiers); - } - - public getTreeSnapshots() { - return Promise.all([ - this.merkleTrees.getTreeInfo(MerkleTreeId.NULLIFIER_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), - this.merkleTrees.getTreeInfo(MerkleTreeId.ARCHIVE), - ]); - } - - /** - * Process a list of transactions - * - * If any of the transactions are invalid, then we throw an error - * @param txs - The transactions to process - */ - public async process(txs: Tx[]) { - // TODO(md): do we need dummy transactions? - txs = txs.map(tx => Tx.clone(tx)); - - await this.validateTransactions(txs); - - for (const tx of txs) { - if (tx.hasPublicCalls()) { - await this.executeEnqueuedCallsAndApplyStateUpdates(tx); - } else { - await this.applyPrivateStateUpdates(tx); - - // TODO(md): do i need to do this? - // Apply empty public data writes - const emptyPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, () => - PublicDataTreeLeaf.empty(), - ); - await this.merkleTrees.batchInsert( - MerkleTreeId.PUBLIC_DATA_TREE, - emptyPublicDataWrites.map(x => x.toBuffer()), - PUBLIC_DATA_SUBTREE_HEIGHT, - ); - } - } - } - - /** - * Validate the transactions - * - * If any of the transactions are invalid, we throw an error - * Halting block validation - * @param txs - The transactions to validate - */ - async validateTransactions(txs: Tx[]) { - const [_, invalidTxs] = await this.txValidator.validateTxs(txs); - - if (invalidTxs.length > 0) { - throw new InvalidTransactionsFound(); - } - } - - /** - * Execute the public calls and apply the state updates - * - * If the transaction has reverted in setup, we throw an error - * @param tx - The transaction to execute - */ - async executeEnqueuedCallsAndApplyStateUpdates(tx: Tx) { - const publicExecutionResults = await this.executePublicCalls(tx); - - // The transaction has reverted in setup - the block should fail - if (!publicExecutionResults) { - throw new Error('Reverted in setup'); - } - - if (tx.data.forPublic) { - const stateUpdates = this.accumulateTransactionAndExecutedStateUpdates(tx, publicExecutionResults); - await this.writeStateUpdates(stateUpdates); - } else { - throw new Error('Public transaction did have public data'); - } - } - - /** - * Take the state updates from each of the transactions and merge them together - * - * 1. Non Revertible calls come first - * 2. Private updates come second - * 3. 
Public updates come third - */ - accumulateTransactionAndExecutedStateUpdates(tx: Tx, publicExecutionResults: NewStateUpdates) { - const { nullifiers, noteHashes, publicDataWrites } = publicExecutionResults; - const { - nullifiers: nonRevertibleNullifiers, - noteHashes: nonRevertibleNoteHashes, - publicDataUpdateRequests: nonRevertiblePublicDataUpdateRequests, - } = tx.data.forPublic!.endNonRevertibleData; - const { - nullifiers: txNullifiers, - noteHashes: txNoteHashes, - publicDataUpdateRequests: txPublicDataUpdateRequests, - } = tx.data.forPublic!.end; - - const nonEmptyNonRevertibleNullifiers = getNonEmptyItems(nonRevertibleNullifiers).map(n => n.value); - const nonEmptyTxNullifiers = getNonEmptyItems(txNullifiers).map(n => n.value); - const publicNullifiers = getNonEmptyItems(nullifiers); - - const nonEmptyNonRevertibleNoteHashes = getNonEmptyItems(nonRevertibleNoteHashes).map(n => n.value); - const nonEmptyTxNoteHashes = getNonEmptyItems(txNoteHashes).map(n => n.value); - const publicNoteHashes = getNonEmptyItems(noteHashes); - - const nonEmptyTxPublicDataUpdateRequests = txPublicDataUpdateRequests.filter(p => !p.isEmpty()); - const nonEmptyNonRevertiblePublicDataUpdateRequests = nonRevertiblePublicDataUpdateRequests.filter( - p => !p.isEmpty(), - ); - const nonEmptyPublicDataWrites = publicDataWrites.filter(p => !p.isEmpty()); - - const allNullifiers = padArrayEnd( - [...nonEmptyNonRevertibleNullifiers, ...nonEmptyTxNullifiers, ...publicNullifiers], - Fr.ZERO, - MAX_NULLIFIERS_PER_TX, - ); - const allNoteHashes = padArrayEnd( - [...nonEmptyNonRevertibleNoteHashes, ...nonEmptyTxNoteHashes, ...publicNoteHashes], - Fr.ZERO, - MAX_NOTE_HASHES_PER_TX, - ); - const allPublicDataUpdateRequests = [ - ...nonEmptyNonRevertiblePublicDataUpdateRequests, - ...nonEmptyTxPublicDataUpdateRequests, - ...nonEmptyPublicDataWrites, - ]; - - return { - nullifiers: allNullifiers, - noteHashes: allNoteHashes, - publicDataWrites: allPublicDataUpdateRequests, - }; - } - - async writeStateUpdates(stateUpdates: NewStateUpdates) { - const { nullifiers, noteHashes, publicDataWrites } = stateUpdates; - - // Convert public state toto the tree leaves - const allPublicDataWrites = publicDataWrites.map( - ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue), - ); - - await this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); - await this.merkleTrees.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - nullifiers.map(n => n.toBuffer()), - NULLIFIER_SUBTREE_HEIGHT, - ); - await this.merkleTrees.batchInsert( - MerkleTreeId.PUBLIC_DATA_TREE, - allPublicDataWrites.map(x => x.toBuffer()), - PUBLIC_DATA_SUBTREE_HEIGHT, - ); - } - - public async executePublicCalls(tx: Tx) { - // Transactions are split up into a number of parts - // 1. Non revertible calls - these run with the public kernel setup - // - This includes fee payment transactions - // 2. Public Calls - These are the the noir code that are enqueued in the tx by the users - // - This runs with the public kernel app logic - // 3. 
Teardown Call - This is the public teardown call that does fee refunds - // - This runs with the public kernel teardown - await this.worldStateDB.addNewContracts(tx); - - // Call stacks - const nonRevertibleCalls = tx.getNonRevertiblePublicExecutionRequests(); - const publicCalls = tx.getRevertiblePublicExecutionRequests(); - const teardownCall = tx.getPublicTeardownExecutionRequest()!; - - // Gas - const teardownGasLimit = tx.data.constants.txContext.gasSettings.getTeardownLimits(); - let gasRemaining = tx.data.constants.txContext.gasSettings.getLimits().sub(teardownGasLimit); - - // Store the successful results for db insertions - const nonRevertiblePublicExecutionResults: PublicExecutionResult[] = []; - const publicExecutionResults: PublicExecutionResult[] = []; - - // Get side effect counters - const publicKernelOutput = tx.data.toPublicKernelCircuitPublicInputs(); - let startSideEffectCounter = publicKernelOutput.endSideEffectCounter + 1; - - // Execute the non revertible calls - for (const call of nonRevertibleCalls) { - const res = await this.publicExecutor.simulate( - call, - this.globalVariables, - gasRemaining, - tx.data.constants.txContext, - this.pendingNullifiers, - Fr.ZERO, - startSideEffectCounter, - ); - startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; - gasRemaining = Gas.from(res.endGasLeft); - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - nonRevertiblePublicExecutionResults.push(res); - await this.worldStateDB.checkpoint(); - } else { - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCommit(); - - return; - } - } - - for (const call of publicCalls) { - // Execute the non revertible calls - const res = await this.publicExecutor.simulate( - call, - this.globalVariables, - gasRemaining, - tx.data.constants.txContext, - this.pendingNullifiers, - Fr.ZERO, - startSideEffectCounter, - ); - startSideEffectCounter = Number(res.endSideEffectCounter.toBigInt()) + 1; - gasRemaining = Gas.from(res.endGasLeft); - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - publicExecutionResults.push(res); - } else { - // Similarly, if a teardown call fails, it will revert - // back to the setup state - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCheckpoint(); - } - } - - if (teardownCall) { - const res = await this.publicExecutor.simulate( - teardownCall, - this.globalVariables, - teardownGasLimit, - tx.data.constants.txContext, - this.pendingNullifiers, - Fr.ZERO, - startSideEffectCounter, - ); - - if (!this.temporaryDidCallOrNestedCallRevert(res)) { - this.addNullifiers(res.nullifiers); - publicExecutionResults.push(res); - } else { - // Similarly, if a public calls fail, it will revert - // back to the setup state - await this.worldStateDB.removeNewContracts(tx); - await this.worldStateDB.rollbackToCheckpoint(); - } - } - - await this.worldStateDB.commit(); - - // Sweep up performed state updates - return this.aggregateResults(nonRevertiblePublicExecutionResults, publicExecutionResults); - } - - aggregateResults(nonRevertibleResults: PublicExecutionResult[], revertibleResults: PublicExecutionResult[]) { - const nonRevertibleNestedExecutions = nonRevertibleResults.flatMap(res => this.collectNestedExecutions(res)); - const nonRevertible = this.aggregatePublicExecutionResults(nonRevertibleNestedExecutions); - - const revertibleNestedExecutions = revertibleResults.flatMap(res => 
this.collectNestedExecutions(res)); - const revertible = this.aggregatePublicExecutionResults(revertibleNestedExecutions); - - return { - nullifiers: [...nonRevertible.nullifiers, ...revertible.nullifiers], - noteHashes: [...nonRevertible.newNoteHashes, ...revertible.newNoteHashes], - publicDataWrites: [...nonRevertible.publicDataWrites, ...revertible.publicDataWrites], - }; - } - - aggregatePublicExecutionResults(results: PublicExecutionResult[]) { - const txCallNewNullifiers: ScopedNullifier[][] = []; - const txCallNewNoteHashes: ScopedNoteHash[][] = []; - const txCallPublicDataWrites: PublicDataUpdateRequest[][] = []; - - for (const res of results) { - // Scope the nullifiers, note hashes and public data writes to the contract address - txCallNewNullifiers.push( - getNonEmptyItems(res.nullifiers).map(n => n.scope(res.executionRequest.contractAddress)), - ); - txCallNewNoteHashes.push( - getNonEmptyItems(res.noteHashes).map(n => n.scope(res.executionRequest.contractAddress)), - ); - txCallPublicDataWrites.push( - getNonEmptyItems(res.contractStorageUpdateRequests).map(req => - PublicDataUpdateRequest.fromContractStorageUpdateRequest(res.executionRequest.contractAddress, req), - ), - ); - } - - // Reverse - const newNullifiers = txCallNewNullifiers.flat(); - const newNoteHashes = txCallNewNoteHashes.flat(); - const newPublicDataWrites = txCallPublicDataWrites.flat(); - - // Squash data writes - const uniquePublicDataWrites = this.removeDuplicatesFromStart(newPublicDataWrites); - - const returning = { - nullifiers: newNullifiers.map(n => siloNullifier(n.contractAddress, n.value)), - newNoteHashes: newNoteHashes.map(n => siloNoteHash(n.contractAddress, n.value)), - publicDataWrites: uniquePublicDataWrites, - }; - return returning; - } - - collectNestedExecutions(result: PublicExecutionResult) { - const nestedExecutions: PublicExecutionResult[] = []; - for (const res of result.nestedExecutions) { - nestedExecutions.push(...this.collectNestedExecutions(res)); - } - return [result, ...nestedExecutions]; - } - - // There is an assumption based on how the block builder works, that the transactions - // provided here CANNOT revert, else they are not added to the block as the kernels - // will fail - // This will change whenever the vm is changed to be able to revert - public async applyPrivateStateUpdates(tx: Tx) { - const insertionPromises: Promise[] = []; - if (tx.data.forRollup) { - const { nullifiers, noteHashes } = tx.data.forRollup.end; - if (nullifiers) { - insertionPromises.push( - this.merkleTrees.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - nullifiers.map(n => n.toBuffer()), - NULLIFIER_SUBTREE_HEIGHT, - ), - ); - } - - if (noteHashes) { - insertionPromises.push(this.merkleTrees.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes)); - } - } - - await Promise.all(insertionPromises); - } - - /** - * Remove duplicates keeping the oldest item, where duplicates are defined by the leaf slot and side effect counter - * @param items - * @returns - */ - removeDuplicatesFromStart(items: PublicDataUpdateRequest[]) { - const slotMap: OrderedMap = new OrderedMap(); - - for (const obj of items) { - const { leafSlot, sideEffectCounter } = obj; - - if (!slotMap.has(leafSlot.toBigInt())) { - slotMap.set(leafSlot.toBigInt(), obj); - } - if (sideEffectCounter > slotMap.get(leafSlot.toBigInt())!.sideEffectCounter) { - // Remove the first instance - slotMap.delete(leafSlot.toBigInt()); - slotMap.set(leafSlot.toBigInt(), obj); - } - } - - return Array.from(slotMap.values()); - } - - // This is a temporary 
method, in our current kernel model, - // nested calls will trigger an entire execution reversion - // if any nested call reverts - temporaryDidCallOrNestedCallRevert(result: PublicExecutionResult) { - if (result.revertReason) { - return true; - } - if (result.nestedExecutions.length > 0) { - for (const nested of result.nestedExecutions) { - if (this.temporaryDidCallOrNestedCallRevert(nested)) { - return true; - } - } - } - return false; - } -} From 0a591c38de813ab3239b2dd7b5b8f290af11a6d1 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 5 Nov 2024 20:46:09 +0000 Subject: [PATCH 087/123] feat: re-ex with shared block building logic --- .../aztec-node/src/aztec-node/server.ts | 2 +- .../src/e2e_p2p/gossip_network.test.ts | 3 + .../end-to-end/src/e2e_p2p/p2p_network.ts | 1 + .../end-to-end/src/e2e_p2p/reex.test.ts | 5 +- yarn-project/end-to-end/src/e2e_p2p/shared.ts | 27 +- .../foundation/src/collection/index.ts | 2 +- .../src/block_builder/src/index.ts | 7 + .../src/block_builder/src/light.test.ts | 351 ++++++++++++++++++ .../src/block_builder/src/light.ts | 91 +++++ .../src/block_builder/src/orchestrator.ts | 43 +++ yarn-project/sequencer-client/src/index.ts | 3 + .../src/sequencer/sequencer.ts | 105 ++++-- yarn-project/simulator/src/public/index.ts | 1 - yarn-project/telemetry-client/src/metrics.ts | 2 + yarn-project/validator-client/src/config.ts | 2 +- .../duties/light_public_processor_factory.ts | 46 --- .../src/errors/validator.error.ts | 4 +- yarn-project/validator-client/src/factory.ts | 14 +- yarn-project/validator-client/src/metrics.ts | 27 ++ .../validator-client/src/validator.test.ts | 27 +- .../validator-client/src/validator.ts | 133 +++---- yarn-project/yarn.lock | 16 +- 22 files changed, 712 insertions(+), 200 deletions(-) create mode 100644 yarn-project/sequencer-client/src/block_builder/src/index.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/src/light.test.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/src/light.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts delete mode 100644 yarn-project/validator-client/src/duties/light_public_processor_factory.ts create mode 100644 yarn-project/validator-client/src/metrics.ts diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 9b77d1326b0..5fcdc048252 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -160,7 +160,7 @@ export class AztecNodeService implements AztecNode { const simulationProvider = await createSimulationProvider(config, log); - const validatorClient = createValidatorClient(config, p2pClient, worldStateSynchronizer, archiver, telemetry); + const validatorClient = createValidatorClient(config, p2pClient, telemetry); // now create the sequencer const sequencer = config.disableValidator diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index b59a5a7ed8b..fe3f23db2e4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -38,6 +38,9 @@ describe('e2e_p2p_network', () => { if (!t.bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); } + + t.ctx.aztecNodeConfig.validatorReEx = true; + // create our network of nodes and submit txs into each of them // the number of txs per 
node and the number of txs per rollup // should be set so that the only way for rollups to be built diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index aeb67a93e08..8e60fc94c8a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -6,6 +6,7 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; import { SpamContract } from '@aztec/noir-contracts.js'; import { type BootstrapNode } from '@aztec/p2p'; +import { createBootstrapNodeFromPrivateKey } from '@aztec/p2p/mocks'; import getPort from 'get-port'; import { getContract } from 'viem'; diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index a3f79c4bcbe..f91f3dd390e 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -1,5 +1,5 @@ import { type AztecNodeService } from '@aztec/aztec-node'; -import { sleep } from '@aztec/aztec.js'; +import { type SentTx, sleep } from '@aztec/aztec.js'; import { beforeAll, describe, it } from '@jest/globals'; import fs from 'fs'; @@ -58,7 +58,6 @@ describe('e2e_p2p_reex', () => { // wait a bit for peers to discover each other await sleep(4000); - // tODO: use a tx with nested calls nodes.forEach(node => { node.getSequencer()?.updateSequencerConfig({ minTxsPerBlock: NUM_TXS_PER_NODE, @@ -69,7 +68,7 @@ describe('e2e_p2p_reex', () => { // now ensure that all txs were successfully mined await Promise.all( - txs.map(async (tx, i) => { + txs.map(async (tx: SentTx, i: number) => { t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); return tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); }), diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index 9b787eaa564..d1c35dfdb66 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -1,12 +1,37 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeService } from '@aztec/aztec-node'; -import { type DebugLogger } from '@aztec/aztec.js'; +import { type DebugLogger, type SentTx } from '@aztec/aztec.js'; import { CompleteAddress, TxStatus } from '@aztec/aztec.js'; import { Fr, GrumpkinScalar } from '@aztec/foundation/fields'; +import { type SpamContract } from '@aztec/noir-contracts.js'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; import { type NodeContext } from '../fixtures/setup_p2p_test.js'; +// submits a set of transactions to the provided Private eXecution Environment (PXE) +export const submitComplexTxsTo = async (logger: DebugLogger, spamContract: SpamContract, numTxs: number) => { + const txs: SentTx[] = []; + + const seed = 1234n; + const spamCount = 15; + for (let i = 0; i < numTxs; i++) { + const tx = spamContract.methods.spam(seed + BigInt(i * spamCount), spamCount, false).send(); + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; +}; + // creates an instance of the PXE and submit a given number of transactions to it. 
export const createPXEServiceAndSubmitTransactions = async ( logger: DebugLogger, diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index 63a52414e69..00f8115dd60 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1,2 +1,2 @@ export * from './array.js'; -export * from './object.js'; \ No newline at end of file +export * from './object.js'; diff --git a/yarn-project/sequencer-client/src/block_builder/src/index.ts b/yarn-project/sequencer-client/src/block_builder/src/index.ts new file mode 100644 index 00000000000..c6c151edcc1 --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/src/index.ts @@ -0,0 +1,7 @@ +import { type BlockBuilder, type MerkleTreeReadOperations } from '@aztec/circuit-types'; + +export * from './orchestrator.js'; +export * from './light.js'; +export interface BlockBuilderFactory { + create(db: MerkleTreeReadOperations): BlockBuilder; +} diff --git a/yarn-project/sequencer-client/src/block_builder/src/light.test.ts b/yarn-project/sequencer-client/src/block_builder/src/light.test.ts new file mode 100644 index 00000000000..b909191f070 --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/src/light.test.ts @@ -0,0 +1,351 @@ +import { TestCircuitProver } from '@aztec/bb-prover'; +import { + MerkleTreeId, + type MerkleTreeWriteOperations, + type ProcessedTx, + type ServerCircuitProver, + makeEmptyProcessedTx, +} from '@aztec/circuit-types'; +import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; +import { + type AppendOnlyTreeSnapshot, + type BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BlockRootRollupInputs, + Fr, + type GlobalVariables, + L1_TO_L2_MSG_SUBTREE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + MembershipWitness, + MergeRollupInputs, + NESTED_RECURSIVE_PROOF_LENGTH, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + type ParityPublicInputs, + PreviousRollupData, + PrivateBaseRollupInputs, + PrivateTubeData, + type RecursiveProof, + RootParityInput, + RootParityInputs, + TUBE_VK_INDEX, + VK_TREE_HEIGHT, + type VerificationKeyAsFields, + VkWitnessData, + makeEmptyRecursiveProof, +} from '@aztec/circuits.js'; +import { makeGlobalVariables } from '@aztec/circuits.js/testing'; +import { padArrayEnd, times } from '@aztec/foundation/collection'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { type Tuple, assertLength } from '@aztec/foundation/serialize'; +import { + ProtocolCircuitVks, + TubeVk, + getVKIndex, + getVKSiblingPath, + getVKTreeRoot, +} from '@aztec/noir-protocol-circuits-types'; +import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; +import { + buildBaseRollupHints, + buildHeaderFromCircuitOutputs, + getRootTreeSiblingPath, + getSubtreeSiblingPath, + getTreeSnapshot, +} from '@aztec/prover-client/helpers'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { type MerkleTreeAdminDatabase, NativeWorldStateService } from '@aztec/world-state'; + +import { jest } from '@jest/globals'; + +import { LightweightBlockBuilder } from './light.js'; + +jest.setTimeout(50_000); + +describe('LightBlockBuilder', () => { + let simulator: ServerCircuitProver; + let logger: DebugLogger; + let globals: GlobalVariables; + let l1ToL2Messages: Fr[]; + let vkRoot: Fr; + + let db: MerkleTreeAdminDatabase; + let fork: MerkleTreeWriteOperations; + let expectsFork: MerkleTreeWriteOperations; + let 
builder: LightweightBlockBuilder; + + let emptyProof: RecursiveProof; + + beforeAll(async () => { + logger = createDebugLogger('aztec:sequencer-client:test:block-builder'); + simulator = new TestCircuitProver(new NoopTelemetryClient()); + vkRoot = getVKTreeRoot(); + emptyProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); + db = await NativeWorldStateService.tmp(); + }); + + beforeEach(async () => { + globals = makeGlobalVariables(1, { chainId: Fr.ZERO, version: Fr.ZERO }); + l1ToL2Messages = times(7, i => new Fr(i + 1)); + fork = await db.fork(); + expectsFork = await db.fork(); + builder = new LightweightBlockBuilder(fork, new NoopTelemetryClient()); + }); + + afterEach(async () => { + await fork.close(); + await expectsFork.close(); + }); + + afterAll(async () => { + await db.close(); + }); + + it('builds a 2 tx header', async () => { + const txs = times(2, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 3 tx header', async () => { + const txs = times(3, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const merge = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + return Promise.resolve([merge, rollupOutputs[2]]); + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 4 tx header', async () => { + const txs = times(4, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + return [mergeLeft, mergeRight]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 4 tx header with no l1 to l2 messages', async () => { + const l1ToL2Messages: Fr[] = []; + const txs = times(4, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + return [mergeLeft, mergeRight]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 5 tx header', async () => { + const txs = times(5, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const merge10 = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const merge11 = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + const merge20 = await getMergeOutput(merge10, merge11); + return [merge20, rollupOutputs[4]]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a single tx header', async () => { + const txs = times(1, i => makeBloatedProcessedTx(fork, vkRoot, protocolContractTreeRoot, i)); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + it('builds an empty header', async () => { + const txs: ProcessedTx[] = []; + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await 
buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + // Makes a tx with a non-zero inclusion fee for testing + const makeTx = (i: number) => + makeBloatedProcessedTx(fork, vkRoot, protocolContractTreeRoot, i, { inclusionFee: new Fr(i) }); + + // Builds the block header using the ts block builder + const buildHeader = async (txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { + const txCount = Math.max(2, txs.length); + await builder.startNewBlock(txCount, globals, l1ToL2Messages); + for (const tx of txs) { + await builder.addNewTx(tx); + } + const { header } = await builder.setBlockCompleted(); + return header; + }; + + // Builds the block header using circuit outputs + // Requires a callback for manually assembling the merge rollup tree + const buildExpectedHeader = async ( + txs: ProcessedTx[], + l1ToL2Messages: Fr[], + getTopMerges?: ( + rollupOutputs: BaseOrMergeRollupPublicInputs[], + ) => Promise<[BaseOrMergeRollupPublicInputs, BaseOrMergeRollupPublicInputs]>, + ) => { + if (txs.length <= 2) { + // Pad if we don't have enough txs + txs = [ + ...txs, + ...times(2 - txs.length, () => + makeEmptyProcessedTx( + expectsFork.getInitialHeader(), + globals.chainId, + globals.version, + vkRoot, + protocolContractTreeRoot, + ), + ), + ]; + // No need to run a merge if there's 0-2 txs + getTopMerges = rollupOutputs => Promise.resolve([rollupOutputs[0], rollupOutputs[1]]); + } + + const rollupOutputs = await getPrivateBaseRollupOutputs(txs); + const [mergeLeft, mergeRight] = await getTopMerges!(rollupOutputs); + const l1ToL2Snapshot = await getL1ToL2Snapshot(l1ToL2Messages); + const parityOutput = await getParityOutput(l1ToL2Messages); + const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); + const rootOutput = await getBlockRootOutput(mergeLeft, mergeRight, parityOutput, l1ToL2Snapshot); + const expectedHeader = buildHeaderFromCircuitOutputs( + [mergeLeft, mergeRight], + parityOutput, + rootOutput, + messageTreeSnapshot, + logger, + ); + + expect(expectedHeader.hash()).toEqual(rootOutput.endBlockHash); + return expectedHeader; + }; + + const getL1ToL2Snapshot = async (msgs: Fr[]) => { + const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + + const newL1ToL2MessageTreeRootSiblingPath = padArrayEnd( + await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, expectsFork), + Fr.ZERO, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + ); + + const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); + return { messageTreeSnapshot, newL1ToL2MessageTreeRootSiblingPath, l1ToL2Messages }; + }; + + const getPrivateBaseRollupOutputs = async (txs: ProcessedTx[]) => { + const rollupOutputs = []; + for (const tx of txs) { + const vkIndex = TUBE_VK_INDEX; + const vkPath = getVKSiblingPath(vkIndex); + const vkData = new VkWitnessData(TubeVk, vkIndex, vkPath); + const tubeData = new PrivateTubeData(tx.data, emptyProof, vkData); + const hints = await buildBaseRollupHints(tx, globals, expectsFork); + const inputs = new PrivateBaseRollupInputs(tubeData, hints); + const result = await simulator.getPrivateBaseRollupProof(inputs); + rollupOutputs.push(result.inputs); + } + return rollupOutputs; + }; + + const getMergeOutput = async (left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) => { + const baseRollupVk = ProtocolCircuitVks['PrivateBaseRollupArtifact'].keyAsFields; + const baseRollupVkWitness = 
getVkMembershipWitness(baseRollupVk); + const leftInput = new PreviousRollupData(left, emptyProof, baseRollupVk, baseRollupVkWitness); + const rightInput = new PreviousRollupData(right, emptyProof, baseRollupVk, baseRollupVkWitness); + const inputs = new MergeRollupInputs([leftInput, rightInput]); + const result = await simulator.getMergeRollupProof(inputs); + return result.inputs; + }; + + const getParityOutput = async (msgs: Fr[]) => { + const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + await expectsFork.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2Messages); + + const rootParityInputs: RootParityInput[] = []; + const baseParityVk = ProtocolCircuitVks['BaseParityArtifact'].keyAsFields; + const baseParityVkWitness = getVkMembershipWitness(baseParityVk); + for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) { + const input = BaseParityInputs.fromSlice(l1ToL2Messages, i, vkRoot); + const { publicInputs } = await simulator.getBaseParityProof(input); + const rootInput = new RootParityInput(emptyProof, baseParityVk, baseParityVkWitness.siblingPath, publicInputs); + rootParityInputs.push(rootInput); + } + + const rootParityInput = new RootParityInputs(assertLength(rootParityInputs, NUM_BASE_PARITY_PER_ROOT_PARITY)); + const result = await simulator.getRootParityProof(rootParityInput); + return result.publicInputs; + }; + + const getBlockRootOutput = async ( + left: BaseOrMergeRollupPublicInputs, + right: BaseOrMergeRollupPublicInputs, + parityOutput: ParityPublicInputs, + l1ToL2Snapshot: { + l1ToL2Messages: Tuple; + newL1ToL2MessageTreeRootSiblingPath: Tuple; + messageTreeSnapshot: AppendOnlyTreeSnapshot; + }, + ) => { + const mergeRollupVk = ProtocolCircuitVks['MergeRollupArtifact'].keyAsFields; + const mergeRollupVkWitness = getVkMembershipWitness(mergeRollupVk); + + const rollupLeft = new PreviousRollupData(left, emptyProof, mergeRollupVk, mergeRollupVkWitness); + const rollupRight = new PreviousRollupData(right, emptyProof, mergeRollupVk, mergeRollupVkWitness); + const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, expectsFork); + const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, expectsFork); + const previousBlockHashLeafIndex = BigInt(startArchiveSnapshot.nextAvailableLeafIndex - 1); + const previousBlockHash = (await expectsFork.getLeafValue(MerkleTreeId.ARCHIVE, previousBlockHashLeafIndex))!; + + const rootParityVk = ProtocolCircuitVks['RootParityArtifact'].keyAsFields; + const rootParityVkWitness = getVkMembershipWitness(rootParityVk); + + const rootParityInput = new RootParityInput( + emptyProof, + rootParityVk, + rootParityVkWitness.siblingPath, + parityOutput, + ); + + const inputs = BlockRootRollupInputs.from({ + previousRollupData: [rollupLeft, rollupRight], + l1ToL2Roots: rootParityInput, + newL1ToL2Messages: l1ToL2Snapshot.l1ToL2Messages, + newL1ToL2MessageTreeRootSiblingPath: l1ToL2Snapshot.newL1ToL2MessageTreeRootSiblingPath, + startL1ToL2MessageTreeSnapshot: l1ToL2Snapshot.messageTreeSnapshot, + startArchiveSnapshot, + newArchiveSiblingPath, + previousBlockHash, + proverId: Fr.ZERO, + }); + + const result = await simulator.getBlockRootRollupProof(inputs); + return result.inputs; + }; + + function getVkMembershipWitness(vk: VerificationKeyAsFields) { + const leafIndex = getVKIndex(vk); + return new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)); + } +}); diff --git a/yarn-project/sequencer-client/src/block_builder/src/light.ts 
b/yarn-project/sequencer-client/src/block_builder/src/light.ts new file mode 100644 index 00000000000..dc41539a486 --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/src/light.ts @@ -0,0 +1,91 @@ +import { createDebugLogger } from '@aztec/aztec.js'; +import { + type BlockBuilder, + Body, + L2Block, + MerkleTreeId, + type MerkleTreeWriteOperations, + type ProcessedTx, + type TxEffect, + makeEmptyProcessedTx, + toTxEffect, +} from '@aztec/circuit-types'; +import { Fr, type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; +import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; +import { buildBaseRollupHints, buildHeaderFromTxEffects, getTreeSnapshot } from '@aztec/prover-client/helpers'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +/** + * Builds a block and its header from a set of processed tx without running any circuits. + */ +export class LightweightBlockBuilder implements BlockBuilder { + private numTxs?: number; + private globalVariables?: GlobalVariables; + private l1ToL2Messages?: Fr[]; + + private readonly txs: ProcessedTx[] = []; + + private readonly logger = createDebugLogger('aztec:sequencer-client:block_builder_light'); + + constructor(private db: MerkleTreeWriteOperations, private telemetry: TelemetryClient) {} + + async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { + this.logger.verbose('Starting new block', { numTxs, globalVariables: globalVariables.toJSON(), l1ToL2Messages }); + this.numTxs = numTxs; + this.globalVariables = globalVariables; + this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + + // Update L1 to L2 tree + await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!); + } + + async addNewTx(tx: ProcessedTx): Promise { + this.logger.verbose('Adding new tx to block', { txHash: tx.hash.toString() }); + this.txs.push(tx); + await buildBaseRollupHints(tx, this.globalVariables!, this.db); + } + + async setBlockCompleted(): Promise { + const paddingTxCount = this.numTxs! - this.txs.length; + this.logger.verbose(`Setting block as completed and adding ${paddingTxCount} padding txs`); + for (let i = 0; i < paddingTxCount; i++) { + await this.addNewTx( + makeEmptyProcessedTx( + this.db.getInitialHeader(), + this.globalVariables!.chainId, + this.globalVariables!.version, + getVKTreeRoot(), + protocolContractTreeRoot, + ), + ); + } + + return this.buildBlock(); + } + + private async buildBlock(): Promise { + this.logger.verbose(`Finalising block`); + const nonEmptyTxEffects: TxEffect[] = this.txs + .map(tx => toTxEffect(tx, this.globalVariables!.gasFees)) + .filter(txEffect => !txEffect.isEmpty()); + const body = new Body(nonEmptyTxEffects); + const header = await buildHeaderFromTxEffects(body, this.globalVariables!, this.l1ToL2Messages!, this.db); + + await this.db.updateArchive(header); + const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); + + const block = new L2Block(newArchive, header, body); + return block; + } +} + +export class LightweightBlockBuilderFactory { + constructor(private telemetry?: TelemetryClient) {} + + create(db: MerkleTreeWriteOperations): BlockBuilder { + return new LightweightBlockBuilder(db, this.telemetry ?? 
new NoopTelemetryClient()); + } +} diff --git a/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts b/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts new file mode 100644 index 00000000000..862963f10fe --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts @@ -0,0 +1,43 @@ +import { TestCircuitProver } from '@aztec/bb-prover'; +import { + type BlockBuilder, + type L2Block, + type MerkleTreeWriteOperations, + type ProcessedTx, +} from '@aztec/circuit-types'; +import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; +import { ProvingOrchestrator } from '@aztec/prover-client/orchestrator'; +import { type SimulationProvider } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +/** + * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. + * This class is unused at the moment, but could be leveraged by a prover-node to ascertain that it can prove a block before + * committing to proving it by sending a quote. + */ +export class OrchestratorBlockBuilder implements BlockBuilder { + private orchestrator: ProvingOrchestrator; + constructor(db: MerkleTreeWriteOperations, simulationProvider: SimulationProvider, telemetry: TelemetryClient) { + const testProver = new TestCircuitProver(telemetry, simulationProvider); + this.orchestrator = new ProvingOrchestrator(db, testProver, telemetry); + } + + startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { + return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); + } + setBlockCompleted(): Promise { + return this.orchestrator.setBlockCompleted(); + } + addNewTx(tx: ProcessedTx): Promise { + return this.orchestrator.addNewTx(tx); + } +} + +export class OrchestratorBlockBuilderFactory { + constructor(private simulationProvider: SimulationProvider, private telemetry?: TelemetryClient) {} + + create(db: MerkleTreeWriteOperations): BlockBuilder { + return new OrchestratorBlockBuilder(db, this.simulationProvider, this.telemetry ?? new NoopTelemetryClient()); + } +} diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 66c24396853..3c01f3bf126 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -5,3 +5,6 @@ export * from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? export * from './global_variable_builder/index.js'; + +// Used by the validator to build blocks - should be moved to a shared library. 
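Both factories above satisfy the same `BlockBuilderFactory` interface from `index.ts`, so callers only deal with `create(db)` plus the three `BlockBuilder` methods. A minimal usage sketch, assuming a world-state fork, global variables, L1-to-L2 messages, and already-processed txs are in scope (the variable names are placeholders, not part of this patch):

// Sketch only: drive a LightweightBlockBuilder directly.
// `fork`, `globals`, `l1ToL2Messages` and `processedTxs` are assumed to exist.
const builder = new LightweightBlockBuilderFactory(telemetryClient).create(fork);

// Blocks are padded to at least two txs; setBlockCompleted() adds the padding.
await builder.startNewBlock(Math.max(2, processedTxs.length), globals, l1ToL2Messages);
for (const tx of processedTxs) {
  await builder.addNewTx(tx);
}
const block = await builder.setBlockCompleted(); // L2Block with header, body and updated archive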
+export * from './block_builder/index.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 0a88e51f520..00bdc47fb8d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -20,6 +20,7 @@ import { AppendOnlyTreeSnapshot, ContentCommitment, GENESIS_ARCHIVE_ROOT, + type GlobalVariables, Header, StateReference, } from '@aztec/circuits.js'; @@ -95,6 +96,9 @@ export class Sequencer { this.updateConfig(config); this.metrics = new SequencerMetrics(telemetry, () => this.state, 'Sequencer'); this.log.verbose(`Initialized sequencer with ${this.minTxsPerBLock}-${this.maxTxsPerBlock} txs per block.`); + + // Register the block builder with the validator client for re-execution + this.validatorClient?.registerBlockBuilder(this.buildBlock.bind(this)); } get tracer(): Tracer { @@ -389,6 +393,60 @@ export class Sequencer { return true; } + /** + * Build a block + * + * Shared between the sequencer and the validator for re-execution + * + * @param validTxs - The valid transactions to construct the block from + * @param newGlobalVariables - The global variables for the new block + * @param historicalHeader - The historical header of the parent + * @param interrupt - The interrupt callback, used to validate the block for submission and check if we should propose the block + */ + private async buildBlock( + validTxs: Tx[], + newGlobalVariables: GlobalVariables, + historicalHeader?: Header, + interrupt?: (processedTxs: ProcessedTx[]) => Promise, + ) { + this.log.debug('Requesting L1 to L2 messages from contract'); + const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(newGlobalVariables.blockNumber.toBigInt()); + this.log.verbose( + `Retrieved ${l1ToL2Messages.length} L1 to L2 messages for block ${newGlobalVariables.blockNumber.toNumber()}`, + ); + + const numRealTxs = validTxs.length; + const blockSize = Math.max(2, numRealTxs); + + // Sync to the previous block at least + if (historicalHeader) { + await this.worldState.syncImmediate(historicalHeader.globalVariables.blockNumber.toNumber() - 1); + } + + const fork = await this.worldState.fork(); + // We create a fresh processor each time to reset any cached state (eg storage writes) + const processor = this.publicProcessorFactory.create(fork, historicalHeader, newGlobalVariables); + const blockBuildingTimer = new Timer(); + const blockBuilder = this.blockBuilderFactory.create(fork); + await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); + + const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => + processor.process(validTxs, blockSize, blockBuilder, this.txValidatorFactory.validatorForProcessedTxs(fork)), + ); + if (failedTxs.length > 0) { + const failedTxData = failedTxs.map(fail => fail.tx); + this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); + await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); + } + + await interrupt?.(processedTxs); + + // All real transactions have been added, set the block as full and complete the proving. 
+ const block = await blockBuilder.setBlockCompleted(); + + return { block, publicProcessorDuration, numProcessedTxs: processedTxs.length, blockBuildingTimer }; + } + /** * @notice Build and propose a block to the chain * @@ -420,33 +478,13 @@ export class Sequencer { } slotNumber=${newGlobalVariables.slotNumber.toNumber()}`, ); - // Get l1 to l2 messages from the contract - this.log.debug('Requesting L1 to L2 messages from contract'); - const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(newGlobalVariables.blockNumber.toBigInt()); - this.log.verbose( - `Retrieved ${l1ToL2Messages.length} L1 to L2 messages for block ${newGlobalVariables.blockNumber.toNumber()}`, - ); - - const numRealTxs = validTxs.length; - const blockSize = Math.max(2, numRealTxs); - - const fork = await this.worldState.fork(); - try { - // We create a fresh processor each time to reset any cached state (eg storage writes) - const processor = this.publicProcessorFactory.create(fork, historicalHeader, newGlobalVariables); - const blockBuildingTimer = new Timer(); - const blockBuilder = this.blockBuilderFactory.create(fork); - await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); - - const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, blockBuilder, this.txValidatorFactory.validatorForProcessedTxs(fork)), - ); - if (failedTxs.length > 0) { - const failedTxData = failedTxs.map(fail => fail.tx); - this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); - await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); - } - + /** + * BuildBlock is shared between the sequencer and the validator for re-execution + * We use the interrupt callback to validate the block for submission and check if we should propose the block + * + * If we fail, we throw an error in order to roll back + */ + const interrupt = async (processedTxs: ProcessedTx[]) => { await this.publisher.validateBlockForSubmission(proposalHeader); if ( @@ -458,9 +496,16 @@ export class Sequencer { // TODO: Roll back changes to world state throw new Error('Should not propose the block'); } + }; - // All real transactions have been added, set the block as full and complete the proving. 
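With `buildBlock` factored out and handed to the validator via `registerBlockBuilder` (see the sequencer constructor change above), an attesting validator can replay a proposal through the same code path and compare state roots before signing. A rough sketch of that check, assuming the proposal's transactions, global variables, and claimed archive root are already available as `proposalTxs`, `proposalGlobals`, and `proposalArchive` (hypothetical names, not part of this patch):

// Hypothetical validator-side re-execution check.
const { block } = await this.blockBuilder!(proposalTxs, proposalGlobals);
if (!block.archive.root.equals(proposalArchive)) {
  // Locally rebuilt state diverges from the proposal, so refuse to attest.
  throw new ReExStateMismatchError();
}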
- const block = await blockBuilder.setBlockCompleted(); + const fork = await this.worldState.fork(); + try { + const { block, publicProcessorDuration, numProcessedTxs, blockBuildingTimer } = await this.buildBlock( + validTxs, + newGlobalVariables, + historicalHeader, + interrupt, + ); // TODO(@PhilWindle) We should probably periodically check for things like another // block being published before ours instead of just waiting on our block @@ -501,7 +546,7 @@ export class Sequencer { await this.publishL2Block(block, attestations, txHashes, proofQuote); this.metrics.recordPublishedBlock(workDuration); this.log.info( - `Submitted rollup block ${block.number} with ${processedTxs.length} transactions duration=${Math.ceil( + `Submitted rollup block ${block.number} with ${numProcessedTxs} transactions duration=${Math.ceil( workDuration, )}ms (Submitter: ${this.publisher.getSenderAddress()})`, ); diff --git a/yarn-project/simulator/src/public/index.ts b/yarn-project/simulator/src/public/index.ts index 5bc58ac6e62..5f744c3211f 100644 --- a/yarn-project/simulator/src/public/index.ts +++ b/yarn-project/simulator/src/public/index.ts @@ -8,5 +8,4 @@ export * from './public_db_sources.js'; export * from './public_kernel.js'; export * from './public_kernel_circuit_simulator.js'; export { PublicProcessor, PublicProcessorFactory } from './public_processor.js'; -export { LightPublicProcessor } from './light_public_processor.js'; export { PublicSideEffectTrace } from './side_effect_trace.js'; diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 094044f416d..8d0266d2456 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -73,3 +73,5 @@ export const WORLD_STATE_MERKLE_TREE_SIZE = 'aztec.world_state.merkle_tree_size' export const WORLD_STATE_DB_SIZE = 'aztec.world_state.db_size'; export const PROOF_VERIFIER_COUNT = 'aztec.proof_verifier.count'; + +export const VALIDATOR_RE_EXECUTION_TIME = 'aztec.validator.re_execution_time'; diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index fa82fb39e2c..0ebda116dcf 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -51,7 +51,7 @@ export const validatorClientConfigMappings: ConfigMappingsType[] = [ - new DataTxValidator(), - new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber), - new DoubleSpendTxValidator(worldStateDB), - ]; - const txValidator: TxValidator = new AggregateTxValidator(...txValidaors); - - return new LightPublicProcessor( - merkleTrees, - worldStateDB, - globalVariables, - historicalHeader, - txValidator, - this.telemetryClient, - ); - } -} diff --git a/yarn-project/validator-client/src/errors/validator.error.ts b/yarn-project/validator-client/src/errors/validator.error.ts index 0c1bac26ead..c19b118f398 100644 --- a/yarn-project/validator-client/src/errors/validator.error.ts +++ b/yarn-project/validator-client/src/errors/validator.error.ts @@ -36,8 +36,8 @@ export class ReExStateMismatchError extends ValidatorError { } } -export class PublicProcessorNotProvidedError extends ValidatorError { +export class BlockBuilderNotProvidedError extends ValidatorError { constructor() { - super('Public processor not provided'); + super('Block builder not provided'); } } diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 44d874fd300..71ceac8bb9d 100644 --- 
a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -1,21 +1,12 @@ -import { type ArchiveSource } from '@aztec/archiver'; -import { type WorldStateSynchronizer } from '@aztec/circuit-types'; import { type P2P } from '@aztec/p2p'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { generatePrivateKey } from 'viem/accounts'; import { type ValidatorClientConfig } from './config.js'; -import { LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { ValidatorClient } from './validator.js'; -export function createValidatorClient( - config: ValidatorClientConfig, - p2pClient: P2P, - worldStateSynchronizer: WorldStateSynchronizer, - archiver: ArchiveSource, - telemetry: TelemetryClient, -) { +export function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, telemetry: TelemetryClient) { if (config.disableValidator) { return undefined; } @@ -25,8 +16,7 @@ export function createValidatorClient( // We only craete a public processor factory if re-execution is enabled if (config.validatorReEx) { - const publicProcessorFactory = new LightPublicProcessorFactory(worldStateSynchronizer, archiver, telemetry); - return ValidatorClient.new(config, p2pClient, telemetry, publicProcessorFactory); + return ValidatorClient.new(config, p2pClient, telemetry); } return ValidatorClient.new(config, p2pClient, telemetry); diff --git a/yarn-project/validator-client/src/metrics.ts b/yarn-project/validator-client/src/metrics.ts new file mode 100644 index 00000000000..45053b138ad --- /dev/null +++ b/yarn-project/validator-client/src/metrics.ts @@ -0,0 +1,27 @@ +import { type Gauge, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; + +export class ValidatorMetrics { + private reExecutionTime: Gauge; + + constructor(private telemetryClient: TelemetryClient) { + const meter = telemetryClient.getMeter('Validator'); + + this.reExecutionTime = meter.createGauge(Metrics.VALIDATOR_RE_EXECUTION_TIME, { + description: 'The time taken to re-execute a transaction', + unit: 'ms', + valueType: ValueType.DOUBLE, + }); + } + + public reExecutionTimer(): () => void { + const start = performance.now(); + return () => { + const end = performance.now(); + this.recordReExecutionTime(end - start); + }; + } + + public recordReExecutionTime(time: number) { + this.reExecutionTime.record(time); + } +} diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 3e7a83eb332..6e1ddd7e9c1 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -1,13 +1,13 @@ /** * Validation logic unit tests */ -import { TxHash } from '@aztec/circuit-types'; +import { mockTx, TxHash } from '@aztec/circuit-types'; import { makeHeader } from '@aztec/circuits.js/testing'; import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type P2P } from '@aztec/p2p'; -import { type LightPublicProcessor } from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { describe, expect, it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -15,33 +15,24 @@ import { type PrivateKeyAccount, generatePrivateKey, privateKeyToAccount } from import { makeBlockAttestation, makeBlockProposal } from 
'../../circuit-types/src/p2p/mocks.js'; import { type ValidatorClientConfig } from './config.js'; -import { type LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { AttestationTimeoutError, + BlockBuilderNotProvidedError, InvalidValidatorPrivateKeyError, - PublicProcessorNotProvidedError, TransactionsNotAvailableError, } from './errors/validator.error.js'; import { ValidatorClient } from './validator.js'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; describe('ValidationService', () => { let config: ValidatorClientConfig; let validatorClient: ValidatorClient; let p2pClient: MockProxy; let validatorAccount: PrivateKeyAccount; - let lightPublicProcessorFactory: MockProxy; - let lightPublicProcessor: MockProxy; beforeEach(() => { p2pClient = mock(); p2pClient.getAttestationsForSlot.mockImplementation(() => Promise.resolve([])); - lightPublicProcessorFactory = mock(); - lightPublicProcessor = mock(); - - lightPublicProcessorFactory.createWithSyncedState.mockImplementation(() => Promise.resolve(lightPublicProcessor)); - const validatorPrivateKey = generatePrivateKey(); validatorAccount = privateKeyToAccount(validatorPrivateKey); @@ -62,9 +53,11 @@ describe('ValidationService', () => { ); }); - it('Should throw an error if re-execution is enabled but no public processor is provided', () => { + it('Should throw an error if re-execution is enabled but no block builder is provided', async () => { config.validatorReEx = true; - expect(() => ValidatorClient.new(config, p2pClient)).toThrow(PublicProcessorNotProvidedError); + p2pClient.getTxByHash.mockImplementation(() => Promise.resolve(mockTx())); + const val = ValidatorClient.new(config, p2pClient); + await expect(val.reExecuteTransactions(makeBlockProposal())).rejects.toThrow(BlockBuilderNotProvidedError); }); it('Should create a valid block proposal', async () => { @@ -105,9 +98,9 @@ describe('ValidationService', () => { // mock the p2pClient.getTxStatus to return undefined for all transactions p2pClient.getTxStatus.mockImplementation(() => undefined); - const val = ValidatorClient.new(config, p2pClient, new NoopTelemetryClient(), lightPublicProcessorFactory); - lightPublicProcessor.process.mockImplementation(() => { - throw new Error(); + const val = ValidatorClient.new(config, p2pClient, new NoopTelemetryClient()); + val.registerBlockBuilder(() => { + throw new Error('Failed to build block'); }); const attestation = await val.attestToProposal(proposal); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index f4741fc0ac6..7789a1fab48 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -1,31 +1,50 @@ -import { type BlockAttestation, type BlockProposal, type Tx, type TxHash } from '@aztec/circuit-types'; -import { type Header } from '@aztec/circuits.js'; +import { + type BlockAttestation, + type BlockProposal, + type L2Block, + type ProcessedTx, + type Tx, + type TxHash, +} from '@aztec/circuit-types'; +import { type GlobalVariables, type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; -import { attachedFixedDataToLogger, createDebugLogger } from '@aztec/foundation/log'; +import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; -import { Timer } from '@aztec/foundation/timer'; +import { type Timer } from 
'@aztec/foundation/timer'; import { type P2P } from '@aztec/p2p'; import { type TelemetryClient, WithTracer } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ValidatorClientConfig } from './config.js'; -import { type LightPublicProcessorFactory } from './duties/light_public_processor_factory.js'; import { ValidationService } from './duties/validation_service.js'; import { AttestationTimeoutError, + BlockBuilderNotProvidedError, InvalidValidatorPrivateKeyError, - PublicProcessorNotProvidedError, ReExStateMismatchError, TransactionsNotAvailableError, } from './errors/validator.error.js'; import { type ValidatorKeyStore } from './key_store/interface.js'; import { LocalKeyStore } from './key_store/local_key_store.js'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { PublicProcessor } from '@aztec/simulator'; +import { ValidatorMetrics } from './metrics.js'; + +/** + * Callback function for building a block + * + * We reuse the sequencer's block building functionality for re-execution + */ +type BlockBuilderCallback = ( + txs: Tx[], + globalVariables: GlobalVariables, + historicalHeader?: Header, + interrupt?: (processedTxs: ProcessedTx[]) => Promise, +) => Promise<{ block: L2Block; publicProcessorDuration: number; numProcessedTxs: number; blockBuildingTimer: Timer }>; export interface Validator { start(): Promise; registerBlockProposalHandler(): void; + registerBlockBuilder(blockBuilder: BlockBuilderCallback): void; // Block validation responsiblities createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise; @@ -35,44 +54,38 @@ export interface Validator { collectAttestations(proposal: BlockProposal, numberOfRequiredAttestations: number): Promise; } -/** Validator Client +/** + * Validator Client */ export class ValidatorClient extends WithTracer implements Validator { private validationService: ValidationService; + private metrics: ValidatorMetrics; + + private blockBuilder?: BlockBuilderCallback = undefined; constructor( keyStore: ValidatorKeyStore, private p2pClient: P2P, private config: ValidatorClientConfig, - private publicProcessorFactory?: PublicProcessorFactory | undefined, - private telemetry: TelemetryClient, + private telemetry: TelemetryClient = new NoopTelemetryClient(), private log = createDebugLogger('aztec:validator'), ) { super(telemetry, 'Validator'); + this.metrics = new ValidatorMetrics(telemetry); this.validationService = new ValidationService(keyStore); this.log.verbose('Initialized validator'); } - static new( - config: ValidatorClientConfig, - p2pClient: P2P, - telemetry?: TelemetryClient = new NoopTelemetryClient(), - publicProcessorFactory?: LightPublicProcessorFactory | undefined, - ) { + static new(config: ValidatorClientConfig, p2pClient: P2P, telemetry: TelemetryClient = new NoopTelemetryClient()) { if (!config.validatorPrivateKey) { throw new InvalidValidatorPrivateKeyError(); } - //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 - if (config.validatorReEx && publicProcessorFactory === undefined) { - throw new PublicProcessorNotProvidedError(); - } - const privateKey = validatePrivateKey(config.validatorPrivateKey); const localKeyStore = new LocalKeyStore(privateKey); - const validator = new ValidatorClient(localKeyStore, p2pClient, config, publicProcessorFactory, telemetry); + const validator = new ValidatorClient(localKeyStore, p2pClient, config, telemetry); 
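(Illustrative aside, not part of this patch.) The BlockBuilderCallback defined above is how the validator reuses the sequencer's block-building pipeline for re-execution instead of keeping its own light public processor. The wiring itself is not shown in this series. The sketch below assumes a node-level call site where config, p2pClient, telemetry and a sequencer exposing the buildBlock method from the earlier sequencer diff are all in scope; those names and the call site are assumptions for illustration, not code from the patch:

    // Hypothetical wiring (assumed call site and names, for illustration only):
    // hand the sequencer's block-building routine to the validator so that
    // reExecuteTransactions can rebuild a proposed block through the same code path.
    const validatorClient = ValidatorClient.new(config, p2pClient, telemetry);
    validatorClient.registerBlockBuilder((txs, globalVariables, historicalHeader, interrupt) =>
      sequencer.buildBlock(txs, globalVariables, historicalHeader, interrupt),
    );

One practical effect of this design, visible later in the series, is that the validator client no longer needs its own dependency on @aztec/simulator.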
validator.registerBlockProposalHandler(); return validator; } @@ -92,6 +105,15 @@ export class ValidatorClient extends WithTracer implements Validator { this.p2pClient.registerBlockProposalHandler(handler); } + /** + * Register a callback function for building a block + * + * We reuse the sequencer's block building functionality for re-execution + */ + public registerBlockBuilder(blockBuilder: BlockBuilderCallback) { + this.blockBuilder = blockBuilder; + } + async attestToProposal(proposal: BlockProposal): Promise { // Check that all of the tranasctions in the proposal are available in the tx pool before attesting this.log.verbose(`request to attest`, { @@ -129,7 +151,9 @@ export class ValidatorClient extends WithTracer implements Validator { async reExecuteTransactions(proposal: BlockProposal) { const { header, txHashes } = proposal.payload; - const txs = (await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx)))).filter(tx => tx !== undefined); + const txs = (await Promise.all(txHashes.map(tx => this.p2pClient.getTxByHash(tx)))).filter( + tx => tx !== undefined, + ) as Tx[]; // If we cannot request all of the transactions, then we should fail if (txs.length !== txHashes.length) { @@ -138,68 +162,23 @@ export class ValidatorClient extends WithTracer implements Validator { } // Assertion: This check will fail if re-execution is not enabled - if (!this.lightPublicProcessorFactory) { - throw new PublicProcessorNotProvidedError(); + if (this.blockBuilder === undefined) { + throw new BlockBuilderNotProvidedError(); } // We sync the state to the previous block as the public processor will not process the current block const targetBlockNumber = header.globalVariables.blockNumber.toNumber() - 1; this.log.verbose(`Re-ex: Syncing state to block number ${targetBlockNumber}`); - const lightProcessor = await this.lightPublicProcessorFactory.createWithSyncedState( - targetBlockNumber, - undefined, - header.globalVariables, - ); + // Use the sequencer's block building logic to re-execute the transactions + const stopTimer = this.metrics.reExecutionTimer(); + const { block } = await this.blockBuilder(txs, header.globalVariables); + stopTimer(); - this.log.verbose(`Re-ex: Re-executing transactions`); - const timer = new Timer(); - await lightProcessor.process(txs as Tx[]); - this.log.verbose(`Re-ex: Re-execution complete ${timer.ms()}ms`); + this.log.verbose(`Re-ex: Re-execution complete`); // This function will throw an error if state updates do not match - await this.checkReExecutedStateRoots(header, lightProcessor); - } - - /** - * Check that the state updates match the header state - * - * TODO(md): - * - Check archive - * - Check l1 to l2 messages - * - * @param header - The header to check - * @param lightProcessor - The light processor to check - */ - private async checkReExecutedStateRoots(header: Header, publicProcessor: PublicProcessor) { - const [newNullifierTree, newNoteHashTree, newPublicDataTree] = await publicProcessor.getTreeSnapshots(); - - - // Add in the real block builder now - - - if (!header.state.partial.nullifierTree.root.toBuffer().equals(newNullifierTree.root)) { - this.log.error( - `Re-ex: nullifierTree does not match, ${header.state.partial.nullifierTree.root - .toBuffer() - .toString('hex')} !== ${newNullifierTree.root.toString('hex')}`, - ); - throw new ReExStateMismatchError(); - } - if (!header.state.partial.noteHashTree.root.toBuffer().equals(newNoteHashTree.root)) { - this.log.error( - `Re-ex: noteHashTree does not match, 
${header.state.partial.noteHashTree.root - .toBuffer() - .toString('hex')} !== ${newNoteHashTree.root.toString('hex')}`, - ); - throw new ReExStateMismatchError(); - } - if (!header.state.partial.publicDataTree.root.toBuffer().equals(newPublicDataTree.root)) { - this.log.error( - `Re-ex: publicDataTree does not match, ${header.state.partial.publicDataTree.root - .toBuffer() - .toString('hex')} !== ${newPublicDataTree.root.toString('hex')}`, - ); + if (!block.archive.root.equals(proposal.archive)) { throw new ReExStateMismatchError(); } } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index fd88a24abfe..9758f65550e 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -3394,7 +3394,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/noir_codegen@portal:../noir/packages/noir_codegen::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/types": 0.34.0 + "@noir-lang/types": 0.37.0 glob: ^10.3.10 ts-command-line-args: ^2.5.1 bin: @@ -3403,13 +3403,13 @@ __metadata: linkType: soft "@noir-lang/noir_js@file:../noir/packages/noir_js::locator=%40aztec%2Faztec3-packages%40workspace%3A.": - version: 0.34.0 - resolution: "@noir-lang/noir_js@file:../noir/packages/noir_js#../noir/packages/noir_js::hash=aa1d0b&locator=%40aztec%2Faztec3-packages%40workspace%3A." + version: 0.37.0 + resolution: "@noir-lang/noir_js@file:../noir/packages/noir_js#../noir/packages/noir_js::hash=7257df&locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/acvm_js": 0.50.0 - "@noir-lang/noirc_abi": 0.34.0 - "@noir-lang/types": 0.34.0 - checksum: 4d5102053ee8ef7e1258221275796493eeaef14664e5de238ce293151c90d0f9aaa19df1c0beb94b3a3911694e50bbe9f71a93497cdfc69cf0e5eb7ee6af3142 + "@noir-lang/acvm_js": 0.53.0 + "@noir-lang/noirc_abi": 0.37.0 + "@noir-lang/types": 0.37.0 + checksum: 50419bebe0146ec664232712abb190148a9bb4e9bcfe538a13a0107192e31045ec22efddeeb8eac3eb5ac6cb50f52782adddaaeb87df328ebea3caf55579a3c8 languageName: node linkType: hard @@ -3417,7 +3417,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/noirc_abi@portal:../noir/packages/noirc_abi::locator=%40aztec%2Faztec3-packages%40workspace%3A." 
dependencies: - "@noir-lang/types": 0.34.0 + "@noir-lang/types": 0.37.0 languageName: node linkType: soft From 29c80ab78caaa0a9d390913d82a161277be42a4d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 07:49:55 +0000 Subject: [PATCH 088/123] fmt test --- yarn-project/validator-client/src/validator.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 6e1ddd7e9c1..2b1076e12ca 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -1,7 +1,7 @@ /** * Validation logic unit tests */ -import { mockTx, TxHash } from '@aztec/circuit-types'; +import { TxHash, mockTx } from '@aztec/circuit-types'; import { makeHeader } from '@aztec/circuits.js/testing'; import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; From f74945c64ada3ae968fffcaae568e1d8e00ef0dd Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 08:12:44 +0000 Subject: [PATCH 089/123] fmt --- .../src/sequencer/sequencer.ts | 29 ++++++++++--------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 6f0a896b826..9cfa5066400 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -434,7 +434,12 @@ export class Sequencer { await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, blockBuilder, this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork)), + processor.process( + validTxs, + blockSize, + blockBuilder, + this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork), + ), ); if (failedTxs.length > 0) { const failedTxData = failedTxs.map(fail => fail.tx); @@ -547,18 +552,16 @@ export class Sequencer { const proofQuote = await this.createProofClaimForPreviousEpoch(newGlobalVariables.slotNumber.toBigInt()); this.log.verbose(proofQuote ? 
`Using proof quote ${inspect(proofQuote.payload)}` : 'No proof quote available'); - try { - await this.publishL2Block(block, attestations, txHashes, proofQuote); - this.metrics.recordPublishedBlock(workDuration); - this.log.info( - `Submitted rollup block ${block.number} with ${numProcessedTxs} transactions duration=${Math.ceil( - workDuration, - )}ms (Submitter: ${this.publisher.getSenderAddress()})`, - ); - } catch (err) { - this.metrics.recordFailedBlock(); - throw err; - } + await this.publishL2Block(block, attestations, txHashes, proofQuote); + this.metrics.recordPublishedBlock(workDuration); + this.log.info( + `Submitted rollup block ${block.number} with ${numProcessedTxs} transactions duration=${Math.ceil( + workDuration, + )}ms (Submitter: ${this.publisher.getSenderAddress()})`, + ); + } catch (err) { + this.metrics.recordFailedBlock(); + throw err; } } From 1202cd4f7487a4ea4f57ddb97daa94ad23b32243 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 08:23:18 +0000 Subject: [PATCH 090/123] fix: remove duplicate --- .../src/block_builder/src/index.ts | 7 - .../src/block_builder/src/light.test.ts | 351 ------------------ .../src/block_builder/src/light.ts | 91 ----- .../src/block_builder/src/orchestrator.ts | 43 --- 4 files changed, 492 deletions(-) delete mode 100644 yarn-project/sequencer-client/src/block_builder/src/index.ts delete mode 100644 yarn-project/sequencer-client/src/block_builder/src/light.test.ts delete mode 100644 yarn-project/sequencer-client/src/block_builder/src/light.ts delete mode 100644 yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts diff --git a/yarn-project/sequencer-client/src/block_builder/src/index.ts b/yarn-project/sequencer-client/src/block_builder/src/index.ts deleted file mode 100644 index c6c151edcc1..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/src/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { type BlockBuilder, type MerkleTreeReadOperations } from '@aztec/circuit-types'; - -export * from './orchestrator.js'; -export * from './light.js'; -export interface BlockBuilderFactory { - create(db: MerkleTreeReadOperations): BlockBuilder; -} diff --git a/yarn-project/sequencer-client/src/block_builder/src/light.test.ts b/yarn-project/sequencer-client/src/block_builder/src/light.test.ts deleted file mode 100644 index b909191f070..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/src/light.test.ts +++ /dev/null @@ -1,351 +0,0 @@ -import { TestCircuitProver } from '@aztec/bb-prover'; -import { - MerkleTreeId, - type MerkleTreeWriteOperations, - type ProcessedTx, - type ServerCircuitProver, - makeEmptyProcessedTx, -} from '@aztec/circuit-types'; -import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; -import { - type AppendOnlyTreeSnapshot, - type BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BlockRootRollupInputs, - Fr, - type GlobalVariables, - L1_TO_L2_MSG_SUBTREE_HEIGHT, - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - MembershipWitness, - MergeRollupInputs, - NESTED_RECURSIVE_PROOF_LENGTH, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - NUM_BASE_PARITY_PER_ROOT_PARITY, - type ParityPublicInputs, - PreviousRollupData, - PrivateBaseRollupInputs, - PrivateTubeData, - type RecursiveProof, - RootParityInput, - RootParityInputs, - TUBE_VK_INDEX, - VK_TREE_HEIGHT, - type VerificationKeyAsFields, - VkWitnessData, - makeEmptyRecursiveProof, -} from '@aztec/circuits.js'; -import { makeGlobalVariables } from '@aztec/circuits.js/testing'; -import { 
padArrayEnd, times } from '@aztec/foundation/collection'; -import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { type Tuple, assertLength } from '@aztec/foundation/serialize'; -import { - ProtocolCircuitVks, - TubeVk, - getVKIndex, - getVKSiblingPath, - getVKTreeRoot, -} from '@aztec/noir-protocol-circuits-types'; -import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; -import { - buildBaseRollupHints, - buildHeaderFromCircuitOutputs, - getRootTreeSiblingPath, - getSubtreeSiblingPath, - getTreeSnapshot, -} from '@aztec/prover-client/helpers'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type MerkleTreeAdminDatabase, NativeWorldStateService } from '@aztec/world-state'; - -import { jest } from '@jest/globals'; - -import { LightweightBlockBuilder } from './light.js'; - -jest.setTimeout(50_000); - -describe('LightBlockBuilder', () => { - let simulator: ServerCircuitProver; - let logger: DebugLogger; - let globals: GlobalVariables; - let l1ToL2Messages: Fr[]; - let vkRoot: Fr; - - let db: MerkleTreeAdminDatabase; - let fork: MerkleTreeWriteOperations; - let expectsFork: MerkleTreeWriteOperations; - let builder: LightweightBlockBuilder; - - let emptyProof: RecursiveProof; - - beforeAll(async () => { - logger = createDebugLogger('aztec:sequencer-client:test:block-builder'); - simulator = new TestCircuitProver(new NoopTelemetryClient()); - vkRoot = getVKTreeRoot(); - emptyProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); - db = await NativeWorldStateService.tmp(); - }); - - beforeEach(async () => { - globals = makeGlobalVariables(1, { chainId: Fr.ZERO, version: Fr.ZERO }); - l1ToL2Messages = times(7, i => new Fr(i + 1)); - fork = await db.fork(); - expectsFork = await db.fork(); - builder = new LightweightBlockBuilder(fork, new NoopTelemetryClient()); - }); - - afterEach(async () => { - await fork.close(); - await expectsFork.close(); - }); - - afterAll(async () => { - await db.close(); - }); - - it('builds a 2 tx header', async () => { - const txs = times(2, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header).toEqual(expectedHeader); - }); - - it('builds a 3 tx header', async () => { - const txs = times(3, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const merge = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - return Promise.resolve([merge, rollupOutputs[2]]); - }); - - expect(header).toEqual(expectedHeader); - }); - - it('builds a 4 tx header', async () => { - const txs = times(4, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - return [mergeLeft, mergeRight]; - }); - - expect(header).toEqual(expectedHeader); - }); - - it('builds a 4 tx header with no l1 to l2 messages', async () => { - const l1ToL2Messages: Fr[] = []; - const txs = times(4, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const 
mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - return [mergeLeft, mergeRight]; - }); - - expect(header).toEqual(expectedHeader); - }); - - it('builds a 5 tx header', async () => { - const txs = times(5, makeTx); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { - const merge10 = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); - const merge11 = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); - const merge20 = await getMergeOutput(merge10, merge11); - return [merge20, rollupOutputs[4]]; - }); - - expect(header).toEqual(expectedHeader); - }); - - it('builds a single tx header', async () => { - const txs = times(1, i => makeBloatedProcessedTx(fork, vkRoot, protocolContractTreeRoot, i)); - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header).toEqual(expectedHeader); - }); - - it('builds an empty header', async () => { - const txs: ProcessedTx[] = []; - const header = await buildHeader(txs, l1ToL2Messages); - - const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); - - expect(header).toEqual(expectedHeader); - }); - - // Makes a tx with a non-zero inclusion fee for testing - const makeTx = (i: number) => - makeBloatedProcessedTx(fork, vkRoot, protocolContractTreeRoot, i, { inclusionFee: new Fr(i) }); - - // Builds the block header using the ts block builder - const buildHeader = async (txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { - const txCount = Math.max(2, txs.length); - await builder.startNewBlock(txCount, globals, l1ToL2Messages); - for (const tx of txs) { - await builder.addNewTx(tx); - } - const { header } = await builder.setBlockCompleted(); - return header; - }; - - // Builds the block header using circuit outputs - // Requires a callback for manually assembling the merge rollup tree - const buildExpectedHeader = async ( - txs: ProcessedTx[], - l1ToL2Messages: Fr[], - getTopMerges?: ( - rollupOutputs: BaseOrMergeRollupPublicInputs[], - ) => Promise<[BaseOrMergeRollupPublicInputs, BaseOrMergeRollupPublicInputs]>, - ) => { - if (txs.length <= 2) { - // Pad if we don't have enough txs - txs = [ - ...txs, - ...times(2 - txs.length, () => - makeEmptyProcessedTx( - expectsFork.getInitialHeader(), - globals.chainId, - globals.version, - vkRoot, - protocolContractTreeRoot, - ), - ), - ]; - // No need to run a merge if there's 0-2 txs - getTopMerges = rollupOutputs => Promise.resolve([rollupOutputs[0], rollupOutputs[1]]); - } - - const rollupOutputs = await getPrivateBaseRollupOutputs(txs); - const [mergeLeft, mergeRight] = await getTopMerges!(rollupOutputs); - const l1ToL2Snapshot = await getL1ToL2Snapshot(l1ToL2Messages); - const parityOutput = await getParityOutput(l1ToL2Messages); - const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); - const rootOutput = await getBlockRootOutput(mergeLeft, mergeRight, parityOutput, l1ToL2Snapshot); - const expectedHeader = buildHeaderFromCircuitOutputs( - [mergeLeft, mergeRight], - parityOutput, - rootOutput, - messageTreeSnapshot, - logger, - ); - - expect(expectedHeader.hash()).toEqual(rootOutput.endBlockHash); - return expectedHeader; - }; - - const getL1ToL2Snapshot = async (msgs: Fr[]) => { - const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - - const newL1ToL2MessageTreeRootSiblingPath = padArrayEnd( - 
await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, expectsFork), - Fr.ZERO, - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - ); - - const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsFork); - return { messageTreeSnapshot, newL1ToL2MessageTreeRootSiblingPath, l1ToL2Messages }; - }; - - const getPrivateBaseRollupOutputs = async (txs: ProcessedTx[]) => { - const rollupOutputs = []; - for (const tx of txs) { - const vkIndex = TUBE_VK_INDEX; - const vkPath = getVKSiblingPath(vkIndex); - const vkData = new VkWitnessData(TubeVk, vkIndex, vkPath); - const tubeData = new PrivateTubeData(tx.data, emptyProof, vkData); - const hints = await buildBaseRollupHints(tx, globals, expectsFork); - const inputs = new PrivateBaseRollupInputs(tubeData, hints); - const result = await simulator.getPrivateBaseRollupProof(inputs); - rollupOutputs.push(result.inputs); - } - return rollupOutputs; - }; - - const getMergeOutput = async (left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) => { - const baseRollupVk = ProtocolCircuitVks['PrivateBaseRollupArtifact'].keyAsFields; - const baseRollupVkWitness = getVkMembershipWitness(baseRollupVk); - const leftInput = new PreviousRollupData(left, emptyProof, baseRollupVk, baseRollupVkWitness); - const rightInput = new PreviousRollupData(right, emptyProof, baseRollupVk, baseRollupVkWitness); - const inputs = new MergeRollupInputs([leftInput, rightInput]); - const result = await simulator.getMergeRollupProof(inputs); - return result.inputs; - }; - - const getParityOutput = async (msgs: Fr[]) => { - const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - await expectsFork.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2Messages); - - const rootParityInputs: RootParityInput[] = []; - const baseParityVk = ProtocolCircuitVks['BaseParityArtifact'].keyAsFields; - const baseParityVkWitness = getVkMembershipWitness(baseParityVk); - for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) { - const input = BaseParityInputs.fromSlice(l1ToL2Messages, i, vkRoot); - const { publicInputs } = await simulator.getBaseParityProof(input); - const rootInput = new RootParityInput(emptyProof, baseParityVk, baseParityVkWitness.siblingPath, publicInputs); - rootParityInputs.push(rootInput); - } - - const rootParityInput = new RootParityInputs(assertLength(rootParityInputs, NUM_BASE_PARITY_PER_ROOT_PARITY)); - const result = await simulator.getRootParityProof(rootParityInput); - return result.publicInputs; - }; - - const getBlockRootOutput = async ( - left: BaseOrMergeRollupPublicInputs, - right: BaseOrMergeRollupPublicInputs, - parityOutput: ParityPublicInputs, - l1ToL2Snapshot: { - l1ToL2Messages: Tuple; - newL1ToL2MessageTreeRootSiblingPath: Tuple; - messageTreeSnapshot: AppendOnlyTreeSnapshot; - }, - ) => { - const mergeRollupVk = ProtocolCircuitVks['MergeRollupArtifact'].keyAsFields; - const mergeRollupVkWitness = getVkMembershipWitness(mergeRollupVk); - - const rollupLeft = new PreviousRollupData(left, emptyProof, mergeRollupVk, mergeRollupVkWitness); - const rollupRight = new PreviousRollupData(right, emptyProof, mergeRollupVk, mergeRollupVkWitness); - const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, expectsFork); - const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, expectsFork); - const previousBlockHashLeafIndex = BigInt(startArchiveSnapshot.nextAvailableLeafIndex - 1); - const previousBlockHash = 
(await expectsFork.getLeafValue(MerkleTreeId.ARCHIVE, previousBlockHashLeafIndex))!; - - const rootParityVk = ProtocolCircuitVks['RootParityArtifact'].keyAsFields; - const rootParityVkWitness = getVkMembershipWitness(rootParityVk); - - const rootParityInput = new RootParityInput( - emptyProof, - rootParityVk, - rootParityVkWitness.siblingPath, - parityOutput, - ); - - const inputs = BlockRootRollupInputs.from({ - previousRollupData: [rollupLeft, rollupRight], - l1ToL2Roots: rootParityInput, - newL1ToL2Messages: l1ToL2Snapshot.l1ToL2Messages, - newL1ToL2MessageTreeRootSiblingPath: l1ToL2Snapshot.newL1ToL2MessageTreeRootSiblingPath, - startL1ToL2MessageTreeSnapshot: l1ToL2Snapshot.messageTreeSnapshot, - startArchiveSnapshot, - newArchiveSiblingPath, - previousBlockHash, - proverId: Fr.ZERO, - }); - - const result = await simulator.getBlockRootRollupProof(inputs); - return result.inputs; - }; - - function getVkMembershipWitness(vk: VerificationKeyAsFields) { - const leafIndex = getVKIndex(vk); - return new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)); - } -}); diff --git a/yarn-project/sequencer-client/src/block_builder/src/light.ts b/yarn-project/sequencer-client/src/block_builder/src/light.ts deleted file mode 100644 index dc41539a486..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/src/light.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { createDebugLogger } from '@aztec/aztec.js'; -import { - type BlockBuilder, - Body, - L2Block, - MerkleTreeId, - type MerkleTreeWriteOperations, - type ProcessedTx, - type TxEffect, - makeEmptyProcessedTx, - toTxEffect, -} from '@aztec/circuit-types'; -import { Fr, type GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; -import { padArrayEnd } from '@aztec/foundation/collection'; -import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; -import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; -import { buildBaseRollupHints, buildHeaderFromTxEffects, getTreeSnapshot } from '@aztec/prover-client/helpers'; -import { type TelemetryClient } from '@aztec/telemetry-client'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; - -/** - * Builds a block and its header from a set of processed tx without running any circuits. - */ -export class LightweightBlockBuilder implements BlockBuilder { - private numTxs?: number; - private globalVariables?: GlobalVariables; - private l1ToL2Messages?: Fr[]; - - private readonly txs: ProcessedTx[] = []; - - private readonly logger = createDebugLogger('aztec:sequencer-client:block_builder_light'); - - constructor(private db: MerkleTreeWriteOperations, private telemetry: TelemetryClient) {} - - async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - this.logger.verbose('Starting new block', { numTxs, globalVariables: globalVariables.toJSON(), l1ToL2Messages }); - this.numTxs = numTxs; - this.globalVariables = globalVariables; - this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - - // Update L1 to L2 tree - await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!); - } - - async addNewTx(tx: ProcessedTx): Promise { - this.logger.verbose('Adding new tx to block', { txHash: tx.hash.toString() }); - this.txs.push(tx); - await buildBaseRollupHints(tx, this.globalVariables!, this.db); - } - - async setBlockCompleted(): Promise { - const paddingTxCount = this.numTxs! 
- this.txs.length; - this.logger.verbose(`Setting block as completed and adding ${paddingTxCount} padding txs`); - for (let i = 0; i < paddingTxCount; i++) { - await this.addNewTx( - makeEmptyProcessedTx( - this.db.getInitialHeader(), - this.globalVariables!.chainId, - this.globalVariables!.version, - getVKTreeRoot(), - protocolContractTreeRoot, - ), - ); - } - - return this.buildBlock(); - } - - private async buildBlock(): Promise { - this.logger.verbose(`Finalising block`); - const nonEmptyTxEffects: TxEffect[] = this.txs - .map(tx => toTxEffect(tx, this.globalVariables!.gasFees)) - .filter(txEffect => !txEffect.isEmpty()); - const body = new Body(nonEmptyTxEffects); - const header = await buildHeaderFromTxEffects(body, this.globalVariables!, this.l1ToL2Messages!, this.db); - - await this.db.updateArchive(header); - const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); - - const block = new L2Block(newArchive, header, body); - return block; - } -} - -export class LightweightBlockBuilderFactory { - constructor(private telemetry?: TelemetryClient) {} - - create(db: MerkleTreeWriteOperations): BlockBuilder { - return new LightweightBlockBuilder(db, this.telemetry ?? new NoopTelemetryClient()); - } -} diff --git a/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts b/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts deleted file mode 100644 index 862963f10fe..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/src/orchestrator.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { TestCircuitProver } from '@aztec/bb-prover'; -import { - type BlockBuilder, - type L2Block, - type MerkleTreeWriteOperations, - type ProcessedTx, -} from '@aztec/circuit-types'; -import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; -import { ProvingOrchestrator } from '@aztec/prover-client/orchestrator'; -import { type SimulationProvider } from '@aztec/simulator'; -import { type TelemetryClient } from '@aztec/telemetry-client'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; - -/** - * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. - * This class is unused at the moment, but could be leveraged by a prover-node to ascertain that it can prove a block before - * committing to proving it by sending a quote. - */ -export class OrchestratorBlockBuilder implements BlockBuilder { - private orchestrator: ProvingOrchestrator; - constructor(db: MerkleTreeWriteOperations, simulationProvider: SimulationProvider, telemetry: TelemetryClient) { - const testProver = new TestCircuitProver(telemetry, simulationProvider); - this.orchestrator = new ProvingOrchestrator(db, testProver, telemetry); - } - - startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); - } - setBlockCompleted(): Promise { - return this.orchestrator.setBlockCompleted(); - } - addNewTx(tx: ProcessedTx): Promise { - return this.orchestrator.addNewTx(tx); - } -} - -export class OrchestratorBlockBuilderFactory { - constructor(private simulationProvider: SimulationProvider, private telemetry?: TelemetryClient) {} - - create(db: MerkleTreeWriteOperations): BlockBuilder { - return new OrchestratorBlockBuilder(db, this.simulationProvider, this.telemetry ?? 
new NoopTelemetryClient()); - } -} From 9ad8bac61d35176509b191c4fe898dd2ab4d02dd Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 08:24:36 +0000 Subject: [PATCH 091/123] fix: remove simulator from validator deps --- yarn-project/validator-client/package.json | 1 - yarn-project/yarn.lock | 1 - 2 files changed, 2 deletions(-) diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index f456c5f6d30..7fff0272c51 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -64,7 +64,6 @@ "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/p2p": "workspace:^", - "@aztec/simulator": "workspace:^", "@aztec/telemetry-client": "workspace:^", "@aztec/types": "workspace:^", "koa": "^2.14.2", diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 9758f65550e..256524913e3 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1244,7 +1244,6 @@ __metadata: "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/p2p": "workspace:^" - "@aztec/simulator": "workspace:^" "@aztec/telemetry-client": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 From 0fd38f53dff062e2d22b39a3b69ca72790bb0225 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 08:26:40 +0000 Subject: [PATCH 092/123] fix: clean --- yarn-project/validator-client/src/factory.ts | 5 ----- yarn-project/validator-client/src/validator.ts | 4 +++- yarn-project/validator-client/tsconfig.json | 3 --- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 71ceac8bb9d..56b5306969b 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -14,10 +14,5 @@ export function createValidatorClient(config: ValidatorClientConfig, p2pClient: config.validatorPrivateKey = generatePrivateKey(); } - // We only craete a public processor factory if re-execution is enabled - if (config.validatorReEx) { - return ValidatorClient.new(config, p2pClient, telemetry); - } - return ValidatorClient.new(config, p2pClient, telemetry); } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 7789a1fab48..a6bda7056e3 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -67,12 +67,14 @@ export class ValidatorClient extends WithTracer implements Validator { keyStore: ValidatorKeyStore, private p2pClient: P2P, private config: ValidatorClientConfig, - private telemetry: TelemetryClient = new NoopTelemetryClient(), + telemetry: TelemetryClient = new NoopTelemetryClient(), private log = createDebugLogger('aztec:validator'), ) { + // Instantiate tracer super(telemetry, 'Validator'); this.metrics = new ValidatorMetrics(telemetry); + //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962 this.validationService = new ValidationService(keyStore); this.log.verbose('Initialized validator'); } diff --git a/yarn-project/validator-client/tsconfig.json b/yarn-project/validator-client/tsconfig.json index 095cc2f2824..17533523097 100644 --- a/yarn-project/validator-client/tsconfig.json +++ b/yarn-project/validator-client/tsconfig.json @@ -21,9 +21,6 @@ { 
"path": "../p2p" }, - { - "path": "../simulator" - }, { "path": "../telemetry-client" }, From 682539dffd373559ffadddcc5bce3431bcafc2c0 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 11:55:38 +0000 Subject: [PATCH 093/123] fix: reenable "flakey" p2p tests --- yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts | 5 ----- yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts | 3 +-- yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts | 2 +- 3 files changed, 2 insertions(+), 8 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts index 7a6e90d6258..25fb02acf2f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts @@ -43,11 +43,6 @@ describe('e2e_p2p_gerousia', () => { } }); - /** - * There is some flaky behavior in here, likely similar to what is in the gossip test. - * For this reason we are not running it as part of the CI. - * TODO(https://github.com/AztecProtocol/aztec-packages/issues/9164): Currently flakey - */ it('Should cast votes to upgrade gerousia', async () => { // create the bootstrap node for the network if (!t.bootstrapNodeEnr) { diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index b59a5a7ed8b..2520cd225b4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -32,8 +32,7 @@ describe('e2e_p2p_network', () => { } }); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/9164): Currently flakey - it.skip('should rollup txs from all peers', async () => { + it('should rollup txs from all peers', async () => { // create the bootstrap node for the network if (!t.bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); diff --git a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts index c9d78f5d1ca..51c222e238a 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/rediscovery.test.ts @@ -32,7 +32,7 @@ describe('e2e_p2p_rediscovery', () => { } }); - it.skip('should re-discover stored peers without bootstrap node', async () => { + it('should re-discover stored peers without bootstrap node', async () => { const contexts: NodeContext[] = []; nodes = await createNodes( t.ctx.aztecNodeConfig, From 8151e7bb2ae5e30c1b692a0d2b2d049500d3314e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 13:22:12 +0000 Subject: [PATCH 094/123] fix: add e2e p2p to test config --- yarn-project/end-to-end/scripts/e2e_test_config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 4ed71e04d03..14352f7009b 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -82,6 +82,7 @@ tests: e2e_static_calls: {} e2e_synching: {} e2e_token_contract: {} + e2e_p2p: {} flakey_e2e_tests: test_path: './src/flakey' ignore_failures: true From b3f7f2c992479b7b387625b42ef5708e9437e4cc Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 6 Nov 2024 15:20:02 +0000 Subject: [PATCH 095/123] chore: run cluster with reex --- 
spartan/aztec-network/templates/validator.yaml | 2 ++ spartan/aztec-network/values.yaml | 1 + 2 files changed, 3 insertions(+) diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 435aa92cc05..c2850e2c8f2 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -113,6 +113,8 @@ spec: value: "{{ .Values.validator.p2p.enabled }}" - name: VALIDATOR_DISABLED value: "{{ .Values.validator.validator.disabled }}" + - name: VALIDATOR_RE_EX + value: "{{ .Values.validator.validator.reEx }}" - name: SEQ_MAX_SECONDS_BETWEEN_BLOCKS value: "{{ .Values.validator.sequencer.maxSecondsBetweenBlocks }}" - name: SEQ_MIN_TX_PER_BLOCK diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 5d4bb793abb..eee892eae08 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -36,6 +36,7 @@ bootNode: minTxsPerBlock: 1 validator: disabled: true + reEx: true p2p: enabled: "true" resources: From d7dec7277e9e37930147cb15e8532fb139c4de57 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 06:11:33 +0000 Subject: [PATCH 096/123] fix: validator script --- .../scripts/native-network/validator.sh | 1 + .../telemetry-client/src/attributes.ts | 2 ++ yarn-project/telemetry-client/src/metrics.ts | 1 + yarn-project/validator-client/src/metrics.ts | 20 +++++++++++++++++-- .../validator-client/src/validator.ts | 1 + 5 files changed, 23 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 518dbb9db97..359fce247a3 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -47,6 +47,7 @@ export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export ETHEREUM_HOST="http://127.0.0.1:8545" export P2P_ENABLED="true" +export VALIDATOR_RE_EX="true" export VALIDATOR_DISABLED="false" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" export SEQ_MIN_TX_PER_BLOCK="1" diff --git a/yarn-project/telemetry-client/src/attributes.ts b/yarn-project/telemetry-client/src/attributes.ts index f94f4fd6512..bb20486653e 100644 --- a/yarn-project/telemetry-client/src/attributes.ts +++ b/yarn-project/telemetry-client/src/attributes.ts @@ -54,6 +54,8 @@ export const BLOCK_TXS_COUNT = 'aztec.block.txs_count'; export const BLOCK_SIZE = 'aztec.block.size'; /** How many blocks are included in this epoch */ export const EPOCH_SIZE = 'aztec.epoch.size'; +/** The proposer of a block */ +export const BLOCK_PROPOSER = 'aztec.block.proposer'; /** The epoch number */ export const EPOCH_NUMBER = 'aztec.epoch.number'; /** The tx hash */ diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 8d0266d2456..243d743e309 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -75,3 +75,4 @@ export const WORLD_STATE_DB_SIZE = 'aztec.world_state.db_size'; export const PROOF_VERIFIER_COUNT = 'aztec.proof_verifier.count'; export const VALIDATOR_RE_EXECUTION_TIME = 'aztec.validator.re_execution_time'; +export const VALIDATOR_FAILED_REEXECUTION_COUNT = 
'aztec.validator.failed_reexecution_count'; diff --git a/yarn-project/validator-client/src/metrics.ts b/yarn-project/validator-client/src/metrics.ts index 45053b138ad..c6da05ef71f 100644 --- a/yarn-project/validator-client/src/metrics.ts +++ b/yarn-project/validator-client/src/metrics.ts @@ -1,11 +1,19 @@ -import { type Gauge, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; +import { BlockProposal } from '@aztec/circuit-types'; +import { type Gauge, Metrics, type TelemetryClient, ValueType, type UpDownCounter, Attributes } from '@aztec/telemetry-client'; export class ValidatorMetrics { private reExecutionTime: Gauge; + private failedReexecutionCounter: UpDownCounter; - constructor(private telemetryClient: TelemetryClient) { + constructor(telemetryClient: TelemetryClient) { const meter = telemetryClient.getMeter('Validator'); + this.failedReexecutionCounter = meter.createUpDownCounter(Metrics.VALIDATOR_FAILED_REEXECUTION_COUNT, { + description: 'The number of failed re-executions', + unit: 'count', + valueType: ValueType.INT, + }); + this.reExecutionTime = meter.createGauge(Metrics.VALIDATOR_RE_EXECUTION_TIME, { description: 'The time taken to re-execute a transaction', unit: 'ms', @@ -24,4 +32,12 @@ export class ValidatorMetrics { public recordReExecutionTime(time: number) { this.reExecutionTime.record(time); } + + public recordFailedReexecution(proposal: BlockProposal) { + this.failedReexecutionCounter.add(1, { + [Attributes.STATUS]: 'failed', + [Attributes.BLOCK_NUMBER]: proposal.payload.header.globalVariables.blockNumber.toString(), + [Attributes.BLOCK_PROPOSER]: proposal.getSender()?.toString(), + }); + } } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index a6bda7056e3..3095980bd49 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -181,6 +181,7 @@ export class ValidatorClient extends WithTracer implements Validator { // This function will throw an error if state updates do not match if (!block.archive.root.equals(proposal.archive)) { + this.metrics.recordFailedReexecution(proposal); throw new ReExStateMismatchError(); } } From 22eab85d2aa8078eee58e2be79675ab429ea927a Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 06:19:36 +0000 Subject: [PATCH 097/123] fix --- yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts | 6 ++++-- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 6 +++++- yarn-project/p2p/src/bootstrap/bootstrap.ts | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts index 25fb02acf2f..b4d0e91d169 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts @@ -17,7 +17,9 @@ import { P2PNetworkTest } from './p2p_network.js'; // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds const NUM_NODES = 4; -const BOOT_NODE_UDP_PORT = 40600; +// Note: these ports must be distinct from the other e2e tests, else the tests will +// interfere with each other. 
+const BOOT_NODE_UDP_PORT = 45000; const DATA_DIR = './data/gossip'; @@ -36,8 +38,8 @@ describe('e2e_p2p_gerousia', () => { }); afterEach(async () => { - await t.stopNodes(nodes); await t.teardown(); + await t.stopNodes(nodes); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); } diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 84a2aa53b8f..a105e4c9013 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -143,14 +143,18 @@ export class P2PNetworkTest { async stopNodes(nodes: AztecNodeService[]) { this.logger.info('Stopping nodes'); + + if (!nodes) return; + if (!nodes.length) return; + for (const node of nodes) { await node.stop(); } - await this.bootstrapNode.stop(); this.logger.info('Nodes stopped'); } async teardown() { + await this.bootstrapNode.stop(); await this.snapshotManager.teardown(); } } diff --git a/yarn-project/p2p/src/bootstrap/bootstrap.ts b/yarn-project/p2p/src/bootstrap/bootstrap.ts index b80ed067a4f..d3840ba5129 100644 --- a/yarn-project/p2p/src/bootstrap/bootstrap.ts +++ b/yarn-project/p2p/src/bootstrap/bootstrap.ts @@ -77,7 +77,7 @@ export class BootstrapNode { public async stop() { // stop libp2p await this.node?.stop(); - this.logger.debug('libp2p has stopped'); + this.logger.debug('Discv5 has stopped'); } /** From d77b1f4f897955639c08bb7163d779c30dd13467 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 06:36:11 +0000 Subject: [PATCH 098/123] run seperately --- yarn-project/end-to-end/scripts/e2e_test_config.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 14352f7009b..b414088ba22 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -82,7 +82,14 @@ tests: e2e_static_calls: {} e2e_synching: {} e2e_token_contract: {} - e2e_p2p: {} + e2e_p2p_gossip: + test_path: 'e2e_p2p/gossip_network.test.ts' + e2e_p2p_gerousia: + test_path: 'e2e_p2p/gerousia.test.ts' + e2e_p2p_rediscovery: + test_path: 'e2e_p2p/rediscovery.test.ts' + e2e_p2p_reqresp: + test_path: 'e2e_p2p/reqresp.test.ts' flakey_e2e_tests: test_path: './src/flakey' ignore_failures: true From 1748069725b39245ffc4f02d5473d29aa2760317 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 06:53:23 +0000 Subject: [PATCH 099/123] fix: extend allowlist to matching prefixes --- scripts/ci/get_e2e_jobs.sh | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index c7e9d26b55f..45b14a93f4c 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -47,21 +47,38 @@ allow_list=( "guides_writing_an_account_contract" ) -# Add labels from input to the allow_list +# Add labels from input to the allow_list, supports prefix matching +# E.g: +# e2e_p2p label will match e2e_p2p_gossip, e2e_p2p_rediscovery, e2e_p2p_reqresp etc. +# e2e_prover label will match e2e_prover_fake_proofs, e2e_prover_coordination etc. 
IFS=',' read -r -a input_labels <<<"$LABELS" -allow_list+=("${input_labels[@]}") +expanded_allow_list=() +for label in "${input_labels[@]}"; do + # For each input label, find all tests that start with this prefix + matching_tests=$(echo "$full_list" | tr ' ' '\n' | grep "^${label}" || true) + if [ ! -z "$matching_tests" ]; then + # Add all matching tests to the expanded allow list + while IFS= read -r test; do + expanded_allow_list+=("$test") + done <<< "$matching_tests" + else + # If no prefix match found, treat it as an exact match (original behavior) + expanded_allow_list+=("$label") + fi +done +allow_list+=("${expanded_allow_list[@]}") # Generate full list of targets, excluding specific entries, on one line test_list=$(echo "${full_list[@]}" | grep -v 'base' | grep -v 'bench' | grep -v "network" | grep -v 'devnet' | xargs echo) -# # If branch is master or allow_list contains 'e2e-all', return full list +# If branch is master or allow_list contains 'e2e-all', return full list if [[ "$BRANCH" == "master" ]] || [[ " ${allow_list[@]} " =~ "e2e_all" ]]; then # print as JSON list echo "$test_list" | jq -Rc 'split(" ")' exit 0 fi -# # Filter the test_list to include only items in the allow_list +# Filter the test_list to include only items in the allow_list filtered_list=() for item in $test_list; do for allowed in "${allow_list[@]}"; do @@ -71,5 +88,5 @@ for item in $test_list; do done done -# # Print the filtered list in JSON format +# Print the filtered list in JSON format echo ${filtered_list[@]} | jq -Rc 'split(" ")' From dbe94324783635e3897058ba0d4e510efbe8a74d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 07:00:45 +0000 Subject: [PATCH 100/123] fix: shorten reqresp test name --- .../src/e2e_p2p/{reqresp_tx.test.ts => reqresp.test.ts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename yarn-project/end-to-end/src/e2e_p2p/{reqresp_tx.test.ts => reqresp.test.ts} (100%) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts similarity index 100% rename from yarn-project/end-to-end/src/e2e_p2p/reqresp_tx.test.ts rename to yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts From 4e76f7e0bc4fef1d9ddf5594ee080f34c2cc3843 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 07:28:14 +0000 Subject: [PATCH 101/123] reex by default --- yarn-project/end-to-end/scripts/native-network/validator.sh | 1 - yarn-project/validator-client/src/config.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 359fce247a3..518dbb9db97 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -47,7 +47,6 @@ export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export ETHEREUM_HOST="http://127.0.0.1:8545" export P2P_ENABLED="true" -export VALIDATOR_RE_EX="true" export VALIDATOR_DISABLED="false" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" export SEQ_MIN_TX_PER_BLOCK="1" diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 0ebda116dcf..dbd9dbc855d 100644 --- 
a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -51,7 +51,7 @@ export const validatorClientConfigMappings: ConfigMappingsType Date: Thu, 7 Nov 2024 07:30:03 +0000 Subject: [PATCH 102/123] fix fmt --- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index a105e4c9013..040c7f6cb15 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -144,8 +144,10 @@ export class P2PNetworkTest { async stopNodes(nodes: AztecNodeService[]) { this.logger.info('Stopping nodes'); - if (!nodes) return; - if (!nodes.length) return; + if (!nodes || !nodes.length) { + this.logger.info('No nodes to stop'); + return; + } for (const node of nodes) { await node.stop(); From fe62a9f925bee072afc3738bdaa40f83ed489e8a Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 07:44:10 +0000 Subject: [PATCH 103/123] fix: simplify matching script --- scripts/ci/get_e2e_jobs.sh | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index 45b14a93f4c..1dd463a0538 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -51,22 +51,23 @@ allow_list=( # E.g: # e2e_p2p label will match e2e_p2p_gossip, e2e_p2p_rediscovery, e2e_p2p_reqresp etc. # e2e_prover label will match e2e_prover_fake_proofs, e2e_prover_coordination etc. -IFS=',' read -r -a input_labels <<<"$LABELS" +IFS=',' read -r -a input_labels <<< "$LABELS" expanded_allow_list=() + for label in "${input_labels[@]}"; do # For each input label, find all tests that start with this prefix matching_tests=$(echo "$full_list" | tr ' ' '\n' | grep "^${label}" || true) - if [ ! -z "$matching_tests" ]; then - # Add all matching tests to the expanded allow list - while IFS= read -r test; do - expanded_allow_list+=("$test") - done <<< "$matching_tests" + + # If matching tests are found, add them to expanded_allow_list; otherwise, add the label itself + if [ -n "$matching_tests" ]; then + expanded_allow_list+=($matching_tests) else - # If no prefix match found, treat it as an exact match (original behavior) expanded_allow_list+=("$label") fi done -allow_list+=("${expanded_allow_list[@]}") + +# Add the input labels and expanded matches to allow_list +allow_list+=("${input_labels[@]}" "${expanded_allow_list[@]}") # Generate full list of targets, excluding specific entries, on one line test_list=$(echo "${full_list[@]}" | grep -v 'base' | grep -v 'bench' | grep -v "network" | grep -v 'devnet' | xargs echo) From 028c637953ad271873cb0fd2a3ad27d3f9a6c725 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 07:45:53 +0000 Subject: [PATCH 104/123] fix: simplify labels --- scripts/ci/get_e2e_jobs.sh | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index 45b14a93f4c..f738ad0d072 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -51,22 +51,24 @@ allow_list=( # E.g: # e2e_p2p label will match e2e_p2p_gossip, e2e_p2p_rediscovery, e2e_p2p_reqresp etc. # e2e_prover label will match e2e_prover_fake_proofs, e2e_prover_coordination etc. 
-IFS=',' read -r -a input_labels <<<"$LABELS" +IFS=',' read -r -a input_labels <<< "$LABELS" expanded_allow_list=() + for label in "${input_labels[@]}"; do # For each input label, find all tests that start with this prefix matching_tests=$(echo "$full_list" | tr ' ' '\n' | grep "^${label}" || true) - if [ ! -z "$matching_tests" ]; then - # Add all matching tests to the expanded allow list - while IFS= read -r test; do - expanded_allow_list+=("$test") - done <<< "$matching_tests" + + # If matching tests are found, add them to expanded_allow_list; otherwise, add the label itself + if [ -n "$matching_tests" ]; then + expanded_allow_list+=($matching_tests) else - # If no prefix match found, treat it as an exact match (original behavior) expanded_allow_list+=("$label") fi done -allow_list+=("${expanded_allow_list[@]}") + +# Add the input labels and expanded matches to allow_list +allow_list+=("${input_labels[@]}" "${expanded_allow_list[@]}") + # Generate full list of targets, excluding specific entries, on one line test_list=$(echo "${full_list[@]}" | grep -v 'base' | grep -v 'bench' | grep -v "network" | grep -v 'devnet' | xargs echo) From 80db367b20666ac417076e706cbee5b38f31d26e Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 08:20:42 +0000 Subject: [PATCH 105/123] fix: sync to immediate block --- yarn-project/sequencer-client/src/sequencer/sequencer.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index d6fe0a0e7e9..3741e63d1bf 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -505,9 +505,7 @@ export class Sequencer { const blockSize = Math.max(2, numRealTxs); // Sync to the previous block at least - if (historicalHeader) { - await this.worldState.syncImmediate(historicalHeader.globalVariables.blockNumber.toNumber() - 1); - } + await this.worldState.syncImmediate(newGlobalVariables.blockNumber.toNumber() - 1); // NB: separating the dbs because both should update the state const publicProcessorFork = await this.worldState.fork(); From 391899ab1ee1733c285588d3e797e04cec8169ee Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 08:24:03 +0000 Subject: [PATCH 106/123] fmt --- yarn-project/validator-client/src/metrics.ts | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/yarn-project/validator-client/src/metrics.ts b/yarn-project/validator-client/src/metrics.ts index c6da05ef71f..c1996fbb86f 100644 --- a/yarn-project/validator-client/src/metrics.ts +++ b/yarn-project/validator-client/src/metrics.ts @@ -1,5 +1,12 @@ -import { BlockProposal } from '@aztec/circuit-types'; -import { type Gauge, Metrics, type TelemetryClient, ValueType, type UpDownCounter, Attributes } from '@aztec/telemetry-client'; +import { type BlockProposal } from '@aztec/circuit-types'; +import { + Attributes, + type Gauge, + Metrics, + type TelemetryClient, + type UpDownCounter, + ValueType, +} from '@aztec/telemetry-client'; export class ValidatorMetrics { private reExecutionTime: Gauge; From dc82880b3df9c49c0e52d722483b470264639e44 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 7 Nov 2024 08:27:40 +0000 Subject: [PATCH 107/123] fix: add reex test --- yarn-project/end-to-end/scripts/e2e_test_config.yml | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index b414088ba22..1ffdd975d49 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -90,6 +90,8 @@ tests: test_path: 'e2e_p2p/rediscovery.test.ts' e2e_p2p_reqresp: test_path: 'e2e_p2p/reqresp.test.ts' + e2e_p2p_reex: + test_path: 'e2e_p2p/reex.test.ts' flakey_e2e_tests: test_path: './src/flakey' ignore_failures: true From a912ecd0180855c4f429f8f830d4838b1864ab1d Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 8 Nov 2024 13:30:09 +0000 Subject: [PATCH 108/123] fix: pr comments, re-ex rejection test --- .../aztec-network/templates/validator.yaml | 4 +- spartan/aztec-network/values.yaml | 2 +- .../end-to-end/src/e2e_p2p/reex.test.ts | 78 ++++++++++++++++--- yarn-project/foundation/src/config/env_var.ts | 2 +- yarn-project/sequencer-client/src/index.ts | 6 +- .../src/sequencer/sequencer.ts | 55 ++++++------- yarn-project/validator-client/src/config.ts | 6 +- .../validator-client/src/validator.ts | 7 +- 8 files changed, 108 insertions(+), 52 deletions(-) diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index bea64d6618c..97a24d74cb9 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -113,8 +113,8 @@ spec: value: "{{ .Values.validator.p2p.enabled }}" - name: VALIDATOR_DISABLED value: "{{ .Values.validator.validator.disabled }}" - - name: VALIDATOR_RE_EX - value: "{{ .Values.validator.validator.reEx }}" + - name: VALIDATOR_REEXECUTE + value: "{{ .Values.validator.validator.reexecute }}" - name: SEQ_MAX_SECONDS_BETWEEN_BLOCKS value: "{{ .Values.validator.sequencer.maxSecondsBetweenBlocks }}" - name: SEQ_MIN_TX_PER_BLOCK diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index c7cc313a23d..2673aae92a0 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -36,7 +36,6 @@ bootNode: minTxsPerBlock: 1 validator: disabled: true - reEx: true p2p: enabled: "true" resources: @@ -79,6 +78,7 @@ validator: enforceTimeTable: true validator: disabled: false + reexecute: true p2p: enabled: "true" startupProbe: diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index f91f3dd390e..45590d93dc9 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -5,8 +5,11 @@ import { beforeAll, describe, it } from '@jest/globals'; import fs from 'fs'; import { createNodes } from '../fixtures/setup_p2p_test.js'; -import { P2PNetworkTest, WAIT_FOR_TX_TIMEOUT } from './p2p_network.js'; +import { P2PNetworkTest } from './p2p_network.js'; import { submitComplexTxsTo } from './shared.js'; +import { BlockProposal, getHashedSignaturePayload } from '@aztec/circuit-types'; + +import { jest } from '@jest/globals'; const NUM_NODES = 4; const NUM_TXS_PER_NODE = 1; @@ -16,7 +19,11 @@ const DATA_DIR = './data/re-ex'; describe('e2e_p2p_reex', () => { let t: P2PNetworkTest; + let nodes: AztecNodeService[]; + beforeAll(async () => { + nodes = []; + t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT); t.logger.verbose('Setup account'); @@ -33,6 +40,8 @@ describe('e2e_p2p_reex', () => { }); afterAll(async () => { + // shutdown all nodes. 
+ await t.stopNodes(nodes); await t.teardown(); for (let i = 0; i < NUM_NODES; i++) { fs.rmSync(`${DATA_DIR}-${i}`, { recursive: true, force: true }); @@ -47,7 +56,7 @@ describe('e2e_p2p_reex', () => { t.ctx.aztecNodeConfig.validatorReEx = true; - const nodes: AztecNodeService[] = await createNodes( + nodes = await createNodes( t.ctx.aztecNodeConfig, t.peerIdPrivateKeys, t.bootstrapNodeEnr, @@ -55,6 +64,37 @@ describe('e2e_p2p_reex', () => { BOOT_NODE_UDP_PORT, ); + // Hook into the node and intercept re-execution logic, ensuring that it was in fact called + const reExecutionSpies = [] + for (const node of nodes) { + // Make sure the nodes submit faulty proposals, in this case a faulty proposal is one where we remove one of the transactions + // Such that the calculated archive will be different! + jest.spyOn((node as any).p2pClient, 'broadcastProposal').mockImplementation(async (...args: unknown[]) => { + // We remove one of the transactions, therefore the block root will be different! + const proposal = args[0] as BlockProposal; + const { txHashes } = proposal.payload; + + // We need to mutate the proposal, so we cast to any + (proposal.payload as any).txHashes = txHashes.slice(0, txHashes.length - 1); + + // We sign over the proposal using the node's signing key + // Abusing javascript to access the node's signing key + const signer = (node as any).sequencer.sequencer.validatorClient.validationService.keyStore; + const newProposal = new BlockProposal( + proposal.payload, + await signer.signMessage(getHashedSignaturePayload(proposal.payload)), + ); + + console.log(newProposal); + + return (node as any).p2pClient.p2pService.propagate(newProposal); + }); + + // Store re-execution spies: node -> sequencer client -> sequencer -> validator + const spy = jest.spyOn((node as any).sequencer.sequencer.validatorClient, 'reExecuteTransactions'); + reExecutionSpies.push(spy); + } + // wait a bit for peers to discover each other await sleep(4000); @@ -66,15 +106,31 @@ describe('e2e_p2p_reex', () => { }); const txs = await submitComplexTxsTo(t.logger, t.spamContract!, NUM_TXS_PER_NODE); - // now ensure that all txs were successfully mined - await Promise.all( - txs.map(async (tx: SentTx, i: number) => { - t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); - return tx.wait({ timeout: WAIT_FOR_TX_TIMEOUT }); - }), - ); + // We ensure that the transactions are NOT mined + try { + await Promise.all( + txs.map(async (tx: SentTx, i: number) => { + t.logger.info(`Waiting for tx ${i}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ); + } catch (e) { + t.logger.info('Failed to mine all txs, as planned'); + } + + // Check that we did call the re-execution logic + let reExecutionCalls = 0; + reExecutionSpies.forEach(spy => { + reExecutionCalls += Number((spy as any).mock.calls.length > 0); + }); + expect(reExecutionCalls).toBe(3); + + // Expect that all of the re-execution attempts failed with an invalid root + for (const spy of reExecutionSpies) { + for (const result of spy.mock.results) { + await expect(result.value).rejects.toThrow("Validator Error: Re-execution state mismatch"); + } + } - // shutdown all nodes.
- await t.stopNodes(nodes); }); }); diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 0358c2aed18..d0b31fea547 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -141,7 +141,7 @@ export type EnvVar = | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' | 'VALIDATOR_DISABLED' | 'VALIDATOR_PRIVATE_KEY' - | 'VALIDATOR_RE_EX' + | 'VALIDATOR_REEXECUTE' | 'VERSION' | 'WS_BLOCK_CHECK_INTERVAL_MS' | 'WS_PROVEN_BLOCKS_ONLY' diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 3c01f3bf126..0818cefac17 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -4,7 +4,5 @@ export * from './publisher/index.js'; export * from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? -export * from './global_variable_builder/index.js'; - -// Used by the validator to build blocks - should be moved to a shared library. -export * from './block_builder/index.js'; +// ISSUE(#9832) +export * from './global_variable_builder/index.js'; \ No newline at end of file diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 3741e63d1bf..405c58c3187 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -506,40 +506,43 @@ export class Sequencer { // Sync to the previous block at least await this.worldState.syncImmediate(newGlobalVariables.blockNumber.toNumber() - 1); + this.log.verbose(`Synced to previous block ${newGlobalVariables.blockNumber.toNumber() - 1}`); // NB: separating the dbs because both should update the state const publicProcessorFork = await this.worldState.fork(); const orchestratorFork = await this.worldState.fork(); - // We create a fresh processor each time to reset any cached state (eg storage writes) - const processor = this.publicProcessorFactory.create(publicProcessorFork, historicalHeader, newGlobalVariables); - const blockBuildingTimer = new Timer(); - const blockBuilder = this.blockBuilderFactory.create(orchestratorFork); - await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); - - const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process( - validTxs, - blockSize, - blockBuilder, - this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork), - ), - ); - if (failedTxs.length > 0) { - const failedTxData = failedTxs.map(fail => fail.tx); - this.log.debug(`Dropping failed txs ${Tx.getHashes(failedTxData).join(', ')}`); - await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); - } - - await interrupt?.(processedTxs); + try { + const processor = this.publicProcessorFactory.create(publicProcessorFork, historicalHeader, newGlobalVariables); + const blockBuildingTimer = new Timer(); + const blockBuilder = this.blockBuilderFactory.create(orchestratorFork); + await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); + + const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => + processor.process( + validTxs, + blockSize, + blockBuilder, + this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork), + ), + ); + if (failedTxs.length > 0) { + const failedTxData = failedTxs.map(fail => fail.tx); + this.log.debug(`Dropping 
failed txs ${Tx.getHashes(failedTxData).join(', ')}`); + await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); + } - // All real transactions have been added, set the block as full and complete the proving. - const block = await blockBuilder.setBlockCompleted(); + await interrupt?.(processedTxs); - await publicProcessorFork.close(); - await orchestratorFork.close(); + // All real transactions have been added, set the block as full and complete the proving. + const block = await blockBuilder.setBlockCompleted(); - return { block, publicProcessorDuration, numProcessedTxs: processedTxs.length, blockBuildingTimer }; + return { block, publicProcessorDuration, numProcessedTxs: processedTxs.length, blockBuildingTimer }; + } finally { + // We create a fresh processor each time to reset any cached state (eg storage writes) + await publicProcessorFork.close(); + await orchestratorFork.close(); + } } /** diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index dbd9dbc855d..1790e01893e 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -24,7 +24,7 @@ export interface ValidatorClientConfig { attestationWaitTimeoutMs: number; /** Re-execute transactions before attesting */ - validatorReEx: boolean; + validatorReexecute: boolean; } export const validatorClientConfigMappings: ConfigMappingsType = { @@ -48,8 +48,8 @@ export const validatorClientConfigMappings: ConfigMappingsType Date: Fri, 8 Nov 2024 14:08:42 +0000 Subject: [PATCH 109/123] fix: do not abbreviate reex --- .../src/e2e_p2p/gossip_network.test.ts | 2 +- yarn-project/end-to-end/src/e2e_p2p/reex.test.ts | 16 +++++++++------- .../src/protocol_contract_data.ts | 14 +++++++------- yarn-project/sequencer-client/src/index.ts | 2 +- .../validator-client/src/validator.test.ts | 4 ++-- yarn-project/validator-client/src/validator.ts | 2 +- 6 files changed, 21 insertions(+), 19 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index d6d316b4cb5..f3ecf945cd5 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -45,7 +45,7 @@ describe('e2e_p2p_network', () => { throw new Error('Bootstrap node ENR is not available'); } - t.ctx.aztecNodeConfig.validatorReEx = true; + t.ctx.aztecNodeConfig.validatorReexecute = true; // create our network of nodes and submit txs into each of them // the number of txs per node and the number of txs per rollup diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 45590d93dc9..8c16d4aa548 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -1,15 +1,15 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, sleep } from '@aztec/aztec.js'; -import { beforeAll, describe, it } from '@jest/globals'; +import { beforeAll, describe, it , jest } from '@jest/globals'; import fs from 'fs'; import { createNodes } from '../fixtures/setup_p2p_test.js'; import { P2PNetworkTest } from './p2p_network.js'; import { submitComplexTxsTo } from './shared.js'; -import { BlockProposal, getHashedSignaturePayload } from '@aztec/circuit-types'; -import { jest } from '@jest/globals'; +/* eslint-disable-next-line no-restricted-imports */ +import { BlockProposal, getHashedSignaturePayload } from 
'@aztec/circuit-types'; const NUM_NODES = 4; const NUM_TXS_PER_NODE = 1; @@ -24,7 +24,11 @@ describe('e2e_p2p_reex', () => { beforeAll(async () => { nodes = []; - t = await P2PNetworkTest.create('e2e_p2p_reex', NUM_NODES, BOOT_NODE_UDP_PORT); + t = await P2PNetworkTest.create({ + testName: 'e2e_p2p_reex', + numberOfNodes: NUM_NODES, + basePort: BOOT_NODE_UDP_PORT, + }); t.logger.verbose('Setup account'); await t.setupAccount(); @@ -54,7 +58,7 @@ describe('e2e_p2p_reex', () => { throw new Error('Bootstrap node ENR is not available'); } - t.ctx.aztecNodeConfig.validatorReEx = true; + t.ctx.aztecNodeConfig.validatorReexecute = true; nodes = await createNodes( t.ctx.aztecNodeConfig, @@ -85,8 +89,6 @@ describe('e2e_p2p_reex', () => { await signer.signMessage(getHashedSignaturePayload(proposal.payload)), ); - console.log(newProposal); - return (node as any).p2pClient.p2pService.propagate(newProposal); }); diff --git a/yarn-project/protocol-contracts/src/protocol_contract_data.ts b/yarn-project/protocol-contracts/src/protocol_contract_data.ts index 7eb1cdc3397..4d1ebfce940 100644 --- a/yarn-project/protocol-contracts/src/protocol_contract_data.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract_data.ts @@ -50,14 +50,14 @@ export const ProtocolContractAddress: Record }; export const ProtocolContractLeaf = { - AuthRegistry: Fr.fromString('0x0931f3bf89563f3898ae9650851083cd560ad800c2e3c561c3853eec4dd7ea8b'), - ContractInstanceDeployer: Fr.fromString('0x266ea4c9917455daa905c1dd1a10753714c6d0369b6f2fe23feeca6de556d164'), - ContractClassRegisterer: Fr.fromString('0x1ccb7a219f72a851089e956d527997b01068d5a28c9ae96b35ebeb45f068af23'), - MultiCallEntrypoint: Fr.fromString('0x1d060217817cf472a579638db722903fd1bbc4c3bdb0ecefa5694c0d4eed851a'), - FeeJuice: Fr.fromString('0x1dab5b687d0c04d2f17a1c8623dea23e7416700891ba1c6e0e86ef678f4727cb'), - Router: Fr.fromString('0x00827d5a8aedb9627d9e5de04735600a4dbb817d4a2f51281aab991699f5de99'), + AuthRegistry: Fr.fromString('0x295f52c40413b660d817ceea60d07154322a035d28d18927b2ca84e8ec3b2115'), + ContractInstanceDeployer: Fr.fromString('0x01314b6c482a9d8f5418cd0d43c17a1c5899ae7c2e1d2f82817baaf3f3b45bd9'), + ContractClassRegisterer: Fr.fromString('0x1d591819cccc4031cc18a7865321c54f6344ae42a205782874d1f72648df2034'), + MultiCallEntrypoint: Fr.fromString('0x20a2e7e882045d27b3aa9e36188b8e45483b3c11652d4a46406699e5eb4efa9b'), + FeeJuice: Fr.fromString('0x14c62d13cca830462ea77bb787878cca3fb8fbb44ffe4d1662c5ffdf98889d5b'), + Router: Fr.fromString('0x05fa1b40b9addbd853af7577676ad31b8b4472eb15c0ac30e784b65b66c40172'), }; export const protocolContractTreeRoot = Fr.fromString( - '0x0f174f6837842b1004a9a41dd736800c12c5dc19f206aed35551b07f8ca6edfb', + '0x071b3ac4cc5d42228920bcbba6674d93eb77694a085ed413806fdfcf8a50b0b0', ); diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 0818cefac17..1718ed0a3a6 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -5,4 +5,4 @@ export * from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? 
// ISSUE(#9832) -export * from './global_variable_builder/index.js'; \ No newline at end of file +export * from './global_variable_builder/index.js'; diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 2b1076e12ca..d60937f2fcc 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -41,7 +41,7 @@ describe('ValidationService', () => { attestationPollingIntervalMs: 1000, attestationWaitTimeoutMs: 1000, disableValidator: false, - validatorReEx: false, + validatorReexecute: false, }; validatorClient = ValidatorClient.new(config, p2pClient, new NoopTelemetryClient()); }); @@ -54,7 +54,7 @@ describe('ValidationService', () => { }); it('Should throw an error if re-execution is enabled but no block builder is provided', async () => { - config.validatorReEx = true; + config.validatorReexecute = true; p2pClient.getTxByHash.mockImplementation(() => Promise.resolve(mockTx())); const val = ValidatorClient.new(config, p2pClient); await expect(val.reExecuteTransactions(makeBlockProposal())).rejects.toThrow(BlockBuilderNotProvidedError); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index bf9e6a29ab7..63d3201c4df 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -126,7 +126,7 @@ export class ValidatorClient extends WithTracer implements Validator { try { await this.ensureTransactionsAreAvailable(proposal); - if (this.config.validatorReEx) { + if (this.config.validatorReexecute) { this.log.verbose(`Re-executing transactions in the proposal before attesting`); await this.reExecuteTransactions(proposal); } From 5eee7ea7b0576671ad7bb072f7ec24ba9b9bedce Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:09:07 +0000 Subject: [PATCH 110/123] fix --- yarn-project/validator-client/src/validator.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 63d3201c4df..35c92a5203e 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -177,8 +177,6 @@ export class ValidatorClient extends WithTracer implements Validator { this.log.verbose(`Re-ex: Re-execution complete`); // This function will throw an error if state updates do not match - console.log('block arvhive:', block.archive.root.toString()); - console.log('proposal archive:', proposal.archive.toString()); if (!block.archive.root.equals(proposal.archive)) { this.metrics.recordFailedReexecution(proposal); throw new ReExStateMismatchError(); From 82f3946adf4815fe45115d3bb2cb8b4a39c871f5 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:10:45 +0000 Subject: [PATCH 111/123] fmt --- yarn-project/end-to-end/src/e2e_p2p/reex.test.ts | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 8c16d4aa548..2d1139b15b7 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -1,16 +1,16 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, sleep } from '@aztec/aztec.js'; -import { beforeAll, describe, it , jest } from 
'@jest/globals'; +/* eslint-disable-next-line no-restricted-imports */ +import { BlockProposal, getHashedSignaturePayload } from '@aztec/circuit-types'; + +import { beforeAll, describe, it, jest } from '@jest/globals'; import fs from 'fs'; import { createNodes } from '../fixtures/setup_p2p_test.js'; import { P2PNetworkTest } from './p2p_network.js'; import { submitComplexTxsTo } from './shared.js'; -/* eslint-disable-next-line no-restricted-imports */ -import { BlockProposal, getHashedSignaturePayload } from '@aztec/circuit-types'; - const NUM_NODES = 4; const NUM_TXS_PER_NODE = 1; const BOOT_NODE_UDP_PORT = 41000; @@ -69,7 +69,7 @@ describe('e2e_p2p_reex', () => { ); // Hook into the node and intercept re-execution logic, ensuring that it was in fact called - const reExecutionSpies = [] + const reExecutionSpies = []; for (const node of nodes) { // Make sure the nodes submit faulty proposals, in this case a faulty proposal is one where we remove one of the transactions // Such that the calculated archive will be different! @@ -130,9 +130,8 @@ describe('e2e_p2p_reex', () => { // Expect that all of the re-execution attempts failed with an invalid root for (const spy of reExecutionSpies) { for (const result of spy.mock.results) { - await expect(result.value).rejects.toThrow("Validator Error: Re-execution state mismatch"); + await expect(result.value).rejects.toThrow('Validator Error: Re-execution state mismatch'); } } - }); }); From 108298b8a9aee441695113407ee51e7d00e27f3b Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 8 Nov 2024 15:41:14 +0000 Subject: [PATCH 112/123] fix --- yarn-project/end-to-end/src/e2e_p2p/reex.test.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 2d1139b15b7..98831e441af 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -120,13 +120,6 @@ describe('e2e_p2p_reex', () => { t.logger.info('Failed to mine all txs, as planned'); } - // Check that we did call the re-execution logic - let reExecutionCalls = 0; - reExecutionSpies.forEach(spy => { - reExecutionCalls += Number((spy as any).mock.calls.length > 0); - }); - expect(reExecutionCalls).toBe(3); - // Expect that all of the re-execution attempts failed with an invalid root for (const spy of reExecutionSpies) { for (const result of spy.mock.results) { From 2c997edef04fc160c845bed44573b76a02f95523 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 8 Nov 2024 19:15:14 +0000 Subject: [PATCH 113/123] merge fix --- yarn-project/end-to-end/src/e2e_p2p/reex.test.ts | 4 ++-- yarn-project/ivc-integration/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 98831e441af..808342210ee 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -2,7 +2,7 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, sleep } from '@aztec/aztec.js'; /* eslint-disable-next-line no-restricted-imports */ -import { BlockProposal, getHashedSignaturePayload } from '@aztec/circuit-types'; +import { BlockProposal, getHashedSignaturePayload, SignatureDomainSeperator } from '@aztec/circuit-types'; import { beforeAll, describe, it, jest } from '@jest/globals';
import fs from 'fs'; @@ -86,7 +86,7 @@ describe('e2e_p2p_reex', () => { const signer = (node as any).sequencer.sequencer.validatorClient.validationService.keyStore; const newProposal = new BlockProposal( proposal.payload, - await signer.signMessage(getHashedSignaturePayload(proposal.payload)), + await signer.signMessage(getHashedSignaturePayload(proposal.payload, SignatureDomainSeperator.blockProposal)), ); return (node as any).p2pClient.p2pService.propagate(newProposal); diff --git a/yarn-project/ivc-integration/package.json b/yarn-project/ivc-integration/package.json index 0eb7a4c6abf..c74cfa2e5be 100644 --- a/yarn-project/ivc-integration/package.json +++ b/yarn-project/ivc-integration/package.json @@ -18,7 +18,7 @@ "formatting:fix:types": "NODE_OPTIONS='--max-old-space-size=8096' run -T eslint --fix ./src/types && run -T prettier -w ./src/types", "generate": "yarn generate:noir-circuits", "generate:noir-circuits": "mkdir -p ./artifacts && cp -r ../../noir-projects/mock-protocol-circuits/target/* ./artifacts && node --no-warnings --loader ts-node/esm src/scripts/generate_declaration_files.ts && node --no-warnings --loader ts-node/esm src/scripts/generate_ts_from_abi.ts && run -T prettier -w ./src/types", - "test:non-browser":"NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --testPathIgnorePatterns=browser", + "test:non-browser": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --testPathIgnorePatterns=browser", "test:browser": "./run_browser_tests.sh", "test": "yarn test:non-browser", "codegen": "yarn noir-codegen", From 464980899680a790d0895eaabf8d05c79e984205 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 13 Nov 2024 11:15:51 +0000 Subject: [PATCH 114/123] fmt --- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 2 +- yarn-project/end-to-end/src/e2e_p2p/reex.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 9ad3141f8e7..5a10cf5297e 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,6 +1,6 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; +import { type AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; import { EthAddress } from '@aztec/circuits.js'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 808342210ee..631f2910596 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -2,7 +2,7 @@ import { type AztecNodeService } from '@aztec/aztec-node'; import { type SentTx, sleep } from '@aztec/aztec.js'; /* eslint-disable-next-line no-restricted-imports */ -import { BlockProposal, getHashedSignaturePayload, SignatureDomainSeperator } from '@aztec/circuit-types'; +import { BlockProposal, SignatureDomainSeperator, getHashedSignaturePayload } from '@aztec/circuit-types'; import { beforeAll, describe, it, jest } from '@jest/globals'; import fs from 'fs'; From 
56237edd0c984df61054ac4929a80e02413bd7da Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Wed, 13 Nov 2024 11:34:54 +0000 Subject: [PATCH 115/123] fix: logging change --- yarn-project/validator-client/src/validator.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 35c92a5203e..7ced2639ab1 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -121,7 +121,7 @@ export class ValidatorClient extends WithTracer implements Validator { // Check that all of the tranasctions in the proposal are available in the tx pool before attesting this.log.verbose(`request to attest`, { archive: proposal.payload.archive.toString(), - txHashes: proposal.payload.txHashes, + txHashes: proposal.payload.txHashes.map(txHash => txHash.toString()), }); try { await this.ensureTransactionsAreAvailable(proposal); From f97ce86abd7aa29d329a0fbd23f9bffebed0e3d6 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:35:52 +0000 Subject: [PATCH 116/123] fix: merge fix --- .../circuits.js/src/structs/public_data_update_request.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/yarn-project/circuits.js/src/structs/public_data_update_request.ts b/yarn-project/circuits.js/src/structs/public_data_update_request.ts index 8ceae9850c1..ab1973fb081 100644 --- a/yarn-project/circuits.js/src/structs/public_data_update_request.ts +++ b/yarn-project/circuits.js/src/structs/public_data_update_request.ts @@ -4,6 +4,9 @@ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/ import { inspect } from 'util'; +import { computePublicDataTreeLeafSlot } from '../hash/hash.js'; +import { type ContractStorageUpdateRequest } from './contract_storage_update_request.js'; + // TO BE REMOVED. /** * Write operations on the public data tree including the previous value. 
From 19e17cce47c384b319d8067d06c6b0613e0fd54f Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Thu, 21 Nov 2024 18:28:02 +0000 Subject: [PATCH 117/123] fix: boxes --- boxes/docker-compose.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/boxes/docker-compose.yml b/boxes/docker-compose.yml index 2ac3069334e..b06cca7ebc8 100644 --- a/boxes/docker-compose.yml +++ b/boxes/docker-compose.yml @@ -1,8 +1,11 @@ version: "3" services: ethereum: - image: ghcr.io/foundry-rs/foundry:v1.0.0 - command: "'anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337'" + image: aztecprotocol/foundry:25f24e677a6a32a62512ad4f561995589ac2c7dc-${ARCH_TAG:-amd64} + entrypoint: > + sh -c ' + anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 + ' aztec: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} From 205e03bb81fc592f50de209f81ce2442eb168d3b Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 22 Nov 2024 15:21:21 +0800 Subject: [PATCH 118/123] fix: prover-agent.yaml syntax --- spartan/aztec-network/templates/prover-agent.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 486a5f62bbe..34f9648f3ba 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -30,6 +30,7 @@ spec: operator: "Equal" value: "true" effect: "NoSchedule" + {{- end }} serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true From 66328f0109cee1c2e5dfa07987ccf641480ef17f Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 22 Nov 2024 07:34:13 +0000 Subject: [PATCH 119/123] try to fix kind test runner --- .github/workflows/ci.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 65944a01641..70cda41241c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,6 +46,7 @@ jobs: outputs: username: ${{ steps.compute_username.outputs.username }} avm-transpiler: ${{ steps.filter.outputs.avm-transpiler }} + aztec-network: ${{ steps.filter.outputs.aztec-network }} build-images: ${{ steps.filter.outputs.build-images }} barretenberg: ${{ steps.filter.outputs.barretenberg }} barretenberg-cpp: ${{ steps.filter.outputs.barretenberg-cpp }} @@ -90,6 +91,8 @@ jobs: - 'build-images/**' noir: - 'noir/**' + aztec-network: + - 'aztec-network/**' avm-transpiler: - 'avm-transpiler/**' l1-contracts: @@ -661,7 +664,7 @@ jobs: kind-smoke-test: needs: [build, configure] - if: needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' + if: needs.configure.outputs.aztec-network == 'true' || needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' runs-on: ${{ needs.configure.outputs.username }}-x86 steps: - uses: actions/checkout@v4 @@ -692,8 +695,8 @@ jobs: # note: proving disabled kind-network-test: needs: [build, configure] - if: contains(github.event.pull_request.labels.*.name, 'network-all') || (needs.configure.outputs.yarn-project == 'true' && github.ref_name == 'master') - runs-on: ${{ needs.configure.outputs.username }}-x86 + if: contains(github.event.pull_request.labels.*.name, 'network-all') || needs.configure.outputs.aztec-network == 'true' || needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' + runs-on: ubuntu-latest strategy: fail-fast: false matrix: From 
ca52e6db427c6f16f9836cb4f4c42acb31fb5d63 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 22 Nov 2024 16:55:42 +0800 Subject: [PATCH 120/123] Update ci.yml --- .github/workflows/ci.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70cda41241c..2870479cf47 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -725,9 +725,6 @@ jobs: steps: - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } - - uses: ./.github/ci-setup-action - with: - concurrency_key: kind-network-${{ matrix.config.test }} - name: Setup and KIND Network Test timeout-minutes: ${{ matrix.config.timeout }} uses: ./.github/ensure-tester-with-images From 87dae4b6e9acde61d74207d21308120b78be1472 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:27:09 +0000 Subject: [PATCH 121/123] fix --- yarn-project/end-to-end/src/fixtures/snapshot_manager.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 72baa06a9e5..3a85d0ff79b 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -64,6 +64,7 @@ export function createSnapshotManager( config: Partial = {}, deployL1ContractsArgs: Partial = { assumeProvenThrough: Number.MAX_SAFE_INTEGER, + initialValidators: [], }, ) { return dataPath From 36dd9d576d98df40aa2669db94929e4556faca35 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:52:04 +0000 Subject: [PATCH 122/123] Revert "Update ci.yml" This reverts commit ca52e6db427c6f16f9836cb4f4c42acb31fb5d63. --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2870479cf47..70cda41241c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -725,6 +725,9 @@ jobs: steps: - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: kind-network-${{ matrix.config.test }} - name: Setup and KIND Network Test timeout-minutes: ${{ matrix.config.timeout }} uses: ./.github/ensure-tester-with-images From 2620c83b06ad7cb3d17d370ccc088b0d2f93d086 Mon Sep 17 00:00:00 2001 From: Maddiaa0 <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:52:15 +0000 Subject: [PATCH 123/123] Revert "try to fix kind test runner" This reverts commit 66328f0109cee1c2e5dfa07987ccf641480ef17f. 
--- .github/workflows/ci.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70cda41241c..65944a01641 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,7 +46,6 @@ jobs: outputs: username: ${{ steps.compute_username.outputs.username }} avm-transpiler: ${{ steps.filter.outputs.avm-transpiler }} - aztec-network: ${{ steps.filter.outputs.aztec-network }} build-images: ${{ steps.filter.outputs.build-images }} barretenberg: ${{ steps.filter.outputs.barretenberg }} barretenberg-cpp: ${{ steps.filter.outputs.barretenberg-cpp }} @@ -91,8 +90,6 @@ jobs: - 'build-images/**' noir: - 'noir/**' - aztec-network: - - 'aztec-network/**' avm-transpiler: - 'avm-transpiler/**' l1-contracts: @@ -664,7 +661,7 @@ jobs: kind-smoke-test: needs: [build, configure] - if: needs.configure.outputs.aztec-network == 'true' || needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' + if: needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' runs-on: ${{ needs.configure.outputs.username }}-x86 steps: - uses: actions/checkout@v4 @@ -695,8 +692,8 @@ jobs: # note: proving disabled kind-network-test: needs: [build, configure] - if: contains(github.event.pull_request.labels.*.name, 'network-all') || needs.configure.outputs.aztec-network == 'true' || needs.configure.outputs.yarn-project == 'true' || github.ref_name == 'master' - runs-on: ubuntu-latest + if: contains(github.event.pull_request.labels.*.name, 'network-all') || (needs.configure.outputs.yarn-project == 'true' && github.ref_name == 'master') + runs-on: ${{ needs.configure.outputs.username }}-x86 strategy: fail-fast: false matrix: