diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 6b19b80e1d6..d4ccad2712e 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -60,7 +60,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { IProofCommitmentEscrow public immutable PROOF_COMMITMENT_ESCROW; uint256 public immutable VERSION; IFeeJuicePortal public immutable FEE_JUICE_PORTAL; - IVerifier public blockProofVerifier; + IVerifier public epochProofVerifier; ChainTips public tips; DataStructures.EpochProofClaim public proofClaim; @@ -78,17 +78,12 @@ contract Rollup is Leonidas, IRollup, ITestRollup { // Testing only. This should be removed eventually. uint256 private assumeProvenThroughBlockNumber; - // Listed at the end of the contract to avoid changing storage slots - // TODO(palla/prover) Drop blockProofVerifier and move this verifier to that slot - IVerifier public epochProofVerifier; - constructor( IFeeJuicePortal _fpcJuicePortal, bytes32 _vkTreeRoot, address _ares, address[] memory _validators ) Leonidas(_ares) { - blockProofVerifier = new MockVerifier(); epochProofVerifier = new MockVerifier(); FEE_JUICE_PORTAL = _fpcJuicePortal; PROOF_COMMITMENT_ESCROW = new MockProofCommitmentEscrow(); @@ -138,17 +133,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { assumeProvenThroughBlockNumber = blockNumber; } - /** - * @notice Set the verifier contract - * - * @dev This is only needed for testing, and should be removed - * - * @param _verifier - The new verifier contract - */ - function setBlockVerifier(address _verifier) external override(ITestRollup) onlyOwner { - blockProofVerifier = IVerifier(_verifier); - } - /** * @notice Set the verifier contract * @@ -194,147 +178,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { claimEpochProofRight(_quote); } - /** - * @notice Submit a proof for a block in the pending chain - * - * @dev TODO(#7346): Verify root proofs rather than block root when batch rollups are integrated. - * - * @dev Will emit `L2ProofVerified` if the proof is valid - * - * @dev Will throw if: - * - The block number is past the pending chain - * - The last archive root of the header does not match the archive root of parent block - * - The archive root of the header does not match the archive root of the proposed block - * - The proof is invalid - * - * @dev We provide the `_archive` even if it could be read from storage itself because it allow for - * better error messages. Without passing it, we would just have a proof verification failure. 
- * - * @dev Following the `BlockLog` struct assumption - * - * @param _header - The header of the block (should match the block in the pending chain) - * @param _archive - The archive root of the block (should match the block in the pending chain) - * @param _proverId - The id of this block's prover - * @param _aggregationObject - The aggregation object for the proof - * @param _proof - The proof to verify - */ - function submitBlockRootProof( - bytes calldata _header, - bytes32 _archive, - bytes32 _proverId, - bytes calldata _aggregationObject, - bytes calldata _proof - ) external override(IRollup) { - if (_canPrune()) { - _prune(); - } - HeaderLib.Header memory header = HeaderLib.decode(_header); - - if (header.globalVariables.blockNumber > tips.pendingBlockNumber) { - revert Errors.Rollup__TryingToProveNonExistingBlock(); - } - - // @note This implicitly also ensures that we have not already proven, since - // the value `tips.provenBlockNumber` is incremented at the end of this function - if (header.globalVariables.blockNumber != tips.provenBlockNumber + 1) { - revert Errors.Rollup__NonSequentialProving(); - } - - bytes32 expectedLastArchive = blocks[header.globalVariables.blockNumber - 1].archive; - // We do it this way to provide better error messages than passing along the storage values - if (header.lastArchive.root != expectedLastArchive) { - revert Errors.Rollup__InvalidArchive(expectedLastArchive, header.lastArchive.root); - } - - bytes32 expectedArchive = blocks[header.globalVariables.blockNumber].archive; - if (_archive != expectedArchive) { - revert Errors.Rollup__InvalidProposedArchive(expectedArchive, _archive); - } - - // TODO(#7346): Currently verifying block root proofs until batch rollups fully integrated. - // Hence the below pub inputs are BlockRootOrBlockMergePublicInputs, which are larger than - // the planned set (RootRollupPublicInputs), for the interim. - // Public inputs are not fully verified (TODO(#7373)) - - bytes32[] memory publicInputs = new bytes32[]( - Constants.BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH - ); - - // From block_root_or_block_merge_public_inputs.nr: BlockRootOrBlockMergePublicInputs. 
- // previous_archive.root: the previous archive tree root - publicInputs[0] = expectedLastArchive; - // previous_archive.next_available_leaf_index: the previous archive next available index - publicInputs[1] = bytes32(header.globalVariables.blockNumber); - - // new_archive.root: the new archive tree root - publicInputs[2] = expectedArchive; - // this is the _next_ available leaf in the archive tree - // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) - // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N - // new_archive.next_available_leaf_index: the new archive next available index - publicInputs[3] = bytes32(header.globalVariables.blockNumber + 1); - - // previous_block_hash: the block hash just preceding this block (will eventually become the end_block_hash of the prev batch) - publicInputs[4] = blocks[header.globalVariables.blockNumber - 1].blockHash; - - // end_block_hash: the current block hash (will eventually become the hash of the final block proven in a batch) - publicInputs[5] = blocks[header.globalVariables.blockNumber].blockHash; - - // For block root proof outputs, we have a block 'range' of just 1 block => start and end globals are the same - bytes32[] memory globalVariablesFields = HeaderLib.toFields(header.globalVariables); - for (uint256 i = 0; i < globalVariablesFields.length; i++) { - // start_global_variables - publicInputs[i + 6] = globalVariablesFields[i]; - // end_global_variables - publicInputs[globalVariablesFields.length + i + 6] = globalVariablesFields[i]; - } - // out_hash: root of this block's l2 to l1 message tree (will eventually be root of roots) - publicInputs[24] = header.contentCommitment.outHash; - - // For block root proof outputs, we have a single recipient-value fee payment pair, - // but the struct contains space for the max (32) => we keep 31*2=62 fields blank to represent it. - // fees: array of recipient-value pairs, for a single block just one entry (will eventually be filled and paid out here) - publicInputs[25] = bytes32(uint256(uint160(header.globalVariables.coinbase))); - publicInputs[26] = bytes32(header.totalFees); - // publicInputs[27] -> publicInputs[88] left blank for empty fee array entries - - // vk_tree_root - publicInputs[89] = vkTreeRoot; - // prover_id: id of current block range's prover - publicInputs[90] = _proverId; - - // the block proof is recursive, which means it comes with an aggregation object - // this snippet copies it into the public inputs needed for verification - // it also guards against empty _aggregationObject used with mocked proofs - uint256 aggregationLength = _aggregationObject.length / 32; - for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { - bytes32 part; - assembly { - part := calldataload(add(_aggregationObject.offset, mul(i, 32))) - } - publicInputs[i + 91] = part; - } - - if (!blockProofVerifier.verify(_proof, publicInputs)) { - revert Errors.Rollup__InvalidProof(); - } - - tips.provenBlockNumber = header.globalVariables.blockNumber; - - for (uint256 i = 0; i < 32; i++) { - address coinbase = address(uint160(uint256(publicInputs[25 + i * 2]))); - uint256 fees = uint256(publicInputs[26 + i * 2]); - - if (coinbase != address(0) && fees > 0) { - // @note This will currently fail if there are insufficient funds in the bridge - // which WILL happen for the old version after an upgrade where the bridge follow. - // Consider allowing a failure. See #7938. 
- FEE_JUICE_PORTAL.distributeFees(coinbase, fees); - } - } - emit L2ProofVerified(header.globalVariables.blockNumber, _proverId); - } - /** * @notice Submit a proof for an epoch in the pending chain * diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index f9e34caf25c..62974893e69 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -11,7 +11,6 @@ import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; interface ITestRollup { - function setBlockVerifier(address _verifier) external; function setEpochVerifier(address _verifier) external; function setVkTreeRoot(bytes32 _vkTreeRoot) external; function setAssumeProvenThroughBlockNumber(uint256 blockNumber) external; @@ -52,14 +51,6 @@ interface IRollup { DataStructures.SignedEpochProofQuote calldata _quote ) external; - function submitBlockRootProof( - bytes calldata _header, - bytes32 _archive, - bytes32 _proverId, - bytes calldata _aggregationObject, - bytes calldata _proof - ) external; - function submitEpochRootProof( uint256 _epochSize, bytes32[7] calldata _args, @@ -100,18 +91,6 @@ interface IRollup { Epoch provenEpochNumber ); - // TODO(#7346): Integrate batch rollups - // function submitRootProof( - // bytes32 _previousArchive, - // bytes32 _archive, - // bytes32 outHash, - // address[32] calldata coinbases, - // uint256[32] calldata fees, - // bytes32 _proverId, - // bytes calldata _aggregationObject, - // bytes calldata _proof - // ) external; - function archive() external view returns (bytes32); function archiveAt(uint256 _blockNumber) external view returns (bytes32); function getProvenBlockNumber() external view returns (uint256); diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 12516f2ff60..a3035620128 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -94,7 +94,6 @@ library Constants { uint256 internal constant BLOCK_MERGE_ROLLUP_INDEX = 23; uint256 internal constant ROOT_ROLLUP_INDEX = 24; uint256 internal constant BLOCK_ROOT_ROLLUP_EMPTY_INDEX = 25; - uint256 internal constant BLOCK_ROOT_ROLLUP_FINAL_INDEX = 26; uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4; uint256 internal constant INITIALIZATION_SLOT_SEPARATOR = 1000000000; uint256 internal constant INITIAL_L2_BLOCK_NUM = 1; diff --git a/noir-projects/noir-protocol-circuits/Nargo.template.toml b/noir-projects/noir-protocol-circuits/Nargo.template.toml index 9db35a94c9e..7cb2f3c88ce 100644 --- a/noir-projects/noir-protocol-circuits/Nargo.template.toml +++ b/noir-projects/noir-protocol-circuits/Nargo.template.toml @@ -35,6 +35,5 @@ members = [ "crates/rollup-block-merge", "crates/rollup-block-root", "crates/rollup-block-root-empty", - "crates/rollup-block-root-final", "crates/rollup-root", ] diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/Nargo.toml b/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/Nargo.toml deleted file mode 100644 index 2d827d6e167..00000000000 --- a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/Nargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "rollup_block_root_final" -type = "bin" -authors = [""] -compiler_version = ">=0.18.0" - -[dependencies] -rollup_lib = { path = "../rollup-lib" } -types = { path = "../types" } diff 
--git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/src/main.nr deleted file mode 100644 index b23b532e957..00000000000 --- a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-final/src/main.nr +++ /dev/null @@ -1,7 +0,0 @@ -use dep::rollup_lib::block_root::{BlockRootRollupInputs, BlockRootOrBlockMergePublicInputs}; - -// This is a non-recursive variant of the rollup-block-root. We use it so we can generate proofs that can be verified on L1, until we -// drop support for proving single blocks and move to epoch proving completely. -fn main(inputs: BlockRootRollupInputs) -> pub BlockRootOrBlockMergePublicInputs { - inputs.block_root_rollup_circuit() -} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index cbe5021a9a4..c791b813df7 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -120,7 +120,6 @@ global BLOCK_ROOT_ROLLUP_INDEX: u32 = 22; global BLOCK_MERGE_ROLLUP_INDEX: u32 = 23; global ROOT_ROLLUP_INDEX: u32 = 24; global BLOCK_ROOT_ROLLUP_EMPTY_INDEX: u32 = 25; -global BLOCK_ROOT_ROLLUP_FINAL_INDEX: u32 = 26; // MISC CONSTANTS global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr index 3d91c8ce182..17e8b772262 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr @@ -5,8 +5,7 @@ use crate::constants::{ EMPTY_NESTED_INDEX, PRIVATE_KERNEL_EMPTY_INDEX, PUBLIC_KERNEL_INNER_INDEX, PUBLIC_KERNEL_MERGE_INDEX, PUBLIC_KERNEL_TAIL_INDEX, BASE_PARITY_INDEX, ROOT_PARITY_INDEX, BASE_ROLLUP_INDEX, MERGE_ROLLUP_INDEX, BLOCK_ROOT_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX, - ROOT_ROLLUP_INDEX, PRIVATE_KERNEL_RESET_TINY_INDEX, BLOCK_ROOT_ROLLUP_EMPTY_INDEX, - BLOCK_ROOT_ROLLUP_FINAL_INDEX + ROOT_ROLLUP_INDEX, PRIVATE_KERNEL_RESET_TINY_INDEX, BLOCK_ROOT_ROLLUP_EMPTY_INDEX }; use crate::merkle_tree::merkle_tree::MerkleTree; @@ -43,7 +42,6 @@ pub fn get_vk_merkle_tree() -> MerkleTree { leaves[BLOCK_MERGE_ROLLUP_INDEX] = 23; leaves[ROOT_ROLLUP_INDEX] = 24; leaves[BLOCK_ROOT_ROLLUP_EMPTY_INDEX] = 25; - leaves[BLOCK_ROOT_ROLLUP_FINAL_INDEX] = 26; MerkleTree::new(leaves) } diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 6b3b5228220..fd11701b11d 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -239,7 +239,7 @@ export async function retrieveL2ProofsFromRollup( const lastProcessedL1BlockNumber = logs.length > 0 ? 
logs.at(-1)!.l1BlockNumber : searchStartBlock - 1n; for (const { txHash, proverId, l2BlockNumber } of logs) { - const proofData = await getProofFromSubmitProofTx(publicClient, txHash, l2BlockNumber, proverId); + const proofData = await getProofFromSubmitProofTx(publicClient, txHash, proverId); retrievedData.push({ proof: proofData.proof, proverId: proofData.proverId, l2BlockNumber, txHash }); } return { @@ -267,31 +267,17 @@ export type SubmitBlockProof = { export async function getProofFromSubmitProofTx( publicClient: PublicClient, txHash: `0x${string}`, - l2BlockNum: bigint, expectedProverId: Fr, ): Promise { const { input: data } = await publicClient.getTransaction({ hash: txHash }); const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data }); let proverId: Fr; - let blockNumber: bigint; let archiveRoot: Fr; let aggregationObject: Buffer; let proof: Proof; - if (functionName === 'submitBlockRootProof') { - const [headerHex, archiveHex, proverIdHex, aggregationObjectHex, proofHex] = args!; - const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex))); - aggregationObject = Buffer.from(hexToBytes(aggregationObjectHex)); - proverId = Fr.fromString(proverIdHex); - proof = Proof.fromBuffer(Buffer.from(hexToBytes(proofHex))); - archiveRoot = Fr.fromString(archiveHex); - - blockNumber = header.globalVariables.blockNumber.toBigInt(); - if (blockNumber !== l2BlockNum) { - throw new Error(`Block number mismatch: expected ${l2BlockNum} but got ${blockNumber}`); - } - } else if (functionName === 'submitEpochRootProof') { + if (functionName === 'submitEpochRootProof') { const [_epochSize, nestedArgs, _fees, aggregationObjectHex, proofHex] = args!; aggregationObject = Buffer.from(hexToBytes(aggregationObjectHex)); proverId = Fr.fromString(nestedArgs[6]); diff --git a/yarn-project/bb-prover/src/honk.ts b/yarn-project/bb-prover/src/honk.ts index a01776ff46a..eb9e13236f2 100644 --- a/yarn-project/bb-prover/src/honk.ts +++ b/yarn-project/bb-prover/src/honk.ts @@ -2,10 +2,7 @@ import { type ProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; export type UltraHonkFlavor = 'ultra_honk' | 'ultra_keccak_honk'; -const UltraKeccakHonkCircuits = [ - 'BlockRootRollupFinalArtifact', - 'RootRollupArtifact', -] as const satisfies ProtocolArtifact[]; +const UltraKeccakHonkCircuits = ['RootRollupArtifact'] as const satisfies ProtocolArtifact[]; export type UltraKeccakHonkProtocolArtifact = (typeof UltraKeccakHonkCircuits)[number]; export type UltraHonkProtocolArtifact = Exclude; diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index 1e945b3c58d..b99dff1748d 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -406,32 +406,6 @@ export class BBNativeRollupProver implements ServerCircuitProver { return makePublicInputsAndRecursiveProof(circuitOutput, proof, verificationKey); } - /** - * Simulates the block root rollup circuit from its inputs. - * Returns a non-recursive proof to verify on L1. - * @dev TODO(palla/prover): This is a temporary workaround to get the proof to L1 with the old block flow. - * @param input - Inputs to the circuit. - * @returns The public inputs as outputs of the simulation. 
- */ - public async getBlockRootRollupFinalProof( - input: BlockRootRollupInputs, - ): Promise> { - const { circuitOutput, proof } = await this.createProof( - input, - 'BlockRootRollupFinalArtifact', - convertBlockRootRollupInputsToWitnessMap, - convertBlockRootRollupOutputsFromWitnessMap, - ); - - const recursiveProof = makeRecursiveProofFromBinary(proof, NESTED_RECURSIVE_PROOF_LENGTH); - - const verificationKey = await this.getVerificationKeyDataForCircuit('BlockRootRollupFinalArtifact'); - - await this.verifyProof('BlockRootRollupFinalArtifact', proof); - - return makePublicInputsAndRecursiveProof(circuitOutput, recursiveProof, verificationKey); - } - /** * Simulates the block merge rollup circuit from its inputs. * @param input - Inputs to the circuit. diff --git a/yarn-project/bb-prover/src/stats.ts b/yarn-project/bb-prover/src/stats.ts index 1e2bc0143a3..2dbc3f94e23 100644 --- a/yarn-project/bb-prover/src/stats.ts +++ b/yarn-project/bb-prover/src/stats.ts @@ -51,8 +51,6 @@ export function mapProtocolArtifactNameToCircuitName( return 'empty-nested'; case 'PrivateKernelEmptyArtifact': return 'private-kernel-empty'; - case 'BlockRootRollupFinalArtifact': - return 'block-root-rollup-final'; default: { const _foo: never = artifact; throw new Error(`Unknown circuit type: ${artifact}`); diff --git a/yarn-project/bb-prover/src/test/test_circuit_prover.ts b/yarn-project/bb-prover/src/test/test_circuit_prover.ts index 89df65b7fa5..7dcb8825088 100644 --- a/yarn-project/bb-prover/src/test/test_circuit_prover.ts +++ b/yarn-project/bb-prover/src/test/test_circuit_prover.ts @@ -385,12 +385,6 @@ export class TestCircuitProver implements ServerCircuitProver { ); } - public getBlockRootRollupFinalProof( - input: BlockRootRollupInputs, - ): Promise> { - return this.getBlockRootRollupProof(input); - } - /** * Simulates the block merge rollup circuit from its inputs. * @param input - Inputs to the circuit. diff --git a/yarn-project/circuit-types/src/interfaces/block-builder.ts b/yarn-project/circuit-types/src/interfaces/block-builder.ts new file mode 100644 index 00000000000..3be4a1f07e5 --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/block-builder.ts @@ -0,0 +1,28 @@ +import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; + +import { type L2Block } from '../l2_block.js'; +import { type ProcessedTx } from '../tx/processed_tx.js'; +import { type ProcessedTxHandler } from './processed-tx-handler.js'; + +/** The interface to a block builder. Generates an L2 block out of a set of processed txs. */ +export interface BlockBuilder extends ProcessedTxHandler { + /** + * Prepares to build a new block. Updates the L1 to L2 message tree. + * @param numTxs - The complete size of the block. + * @param globalVariables - The global variables for this block. + * @param l1ToL2Messages - The set of L1 to L2 messages to be included in this block. + */ + startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise; + + /** + * Adds a processed tx to the block. Updates world state with the effects from this tx. + * @param tx - The transaction to be added. + */ + addNewTx(tx: ProcessedTx): Promise; + + /** + * Pads the block with empty txs if it hasn't reached the declared number of txs. + * Assembles the block and updates the archive tree. 
+ */ + setBlockCompleted(): Promise; +} diff --git a/yarn-project/circuit-types/src/interfaces/block-prover.ts b/yarn-project/circuit-types/src/interfaces/block-prover.ts deleted file mode 100644 index a2a053d8a37..00000000000 --- a/yarn-project/circuit-types/src/interfaces/block-prover.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { type Fr, type GlobalVariables, type Proof, type RootRollupPublicInputs } from '@aztec/circuits.js'; - -import { type L2Block } from '../l2_block.js'; -import { type ProcessedTx } from '../tx/processed_tx.js'; - -export enum PROVING_STATUS { - SUCCESS, - FAILURE, -} - -export type ProvingSuccess = { - status: PROVING_STATUS.SUCCESS; -}; - -export type ProvingFailure = { - status: PROVING_STATUS.FAILURE; - reason: string; -}; - -export type ProvingResult = ProvingSuccess | ProvingFailure; - -export type ProvingTicket = { - provingPromise: Promise; -}; - -export type SimulationBlockResult = { - block: L2Block; -}; - -export type ProvingBlockResult = SimulationBlockResult & { - proof: Proof; - aggregationObject: Fr[]; -}; - -export type ProvingEpochResult = { publicInputs: RootRollupPublicInputs; proof: Proof }; - -/** Receives processed txs as part of block simulation or proving. */ -export interface ProcessedTxHandler { - /** - * Add a processed transaction to the current block. - * @param tx - The transaction to be added. - */ - addNewTx(tx: ProcessedTx): Promise; -} - -/** The interface to a block simulator. Generates an L2 block out of a set of processed txs by calling into the Aztec circuits. */ -export interface BlockSimulator extends ProcessedTxHandler { - /** - * Prepares to build a new block. - * @param numTxs - The complete size of the block, must be a power of 2 - * @param globalVariables - The global variables for this block - * @param l1ToL2Messages - The set of L1 to L2 messages to be included in this block - */ - startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise; - - /** Cancels the block currently being processed. Processes already in progress built may continue but further proofs should not be started. */ - cancel(): void; - - /** Performs the final archive tree insertion for this block and returns the L2Block. */ - finaliseBlock(): Promise; - - /** - * Mark the block as having all the transactions it is going to contain. - * Will pad the block to its complete size with empty transactions and prove all the way to the root rollup. - */ - setBlockCompleted(): Promise; -} - -/** The interface to a block prover. Generates a root rollup proof out of a set of processed txs by recursively proving Aztec circuits. */ -export interface BlockProver extends BlockSimulator { - /** Returns an identifier for the prover or zero if not set. */ - getProverId(): Fr; - - /** Performs the final archive tree insertion for this block and returns the L2Block. 
*/ - finaliseBlock(): Promise; -} - -export interface EpochProver extends BlockProver { - startNewEpoch(epochNumber: number, totalNumBlocks: number): ProvingTicket; - - setEpochCompleted(): void; - - finaliseEpoch(): ProvingEpochResult; -} diff --git a/yarn-project/circuit-types/src/interfaces/epoch-prover.ts b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts new file mode 100644 index 00000000000..36f5e911b2d --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts @@ -0,0 +1,41 @@ +import { type Fr, type Proof, type RootRollupPublicInputs } from '@aztec/circuits.js'; + +import { type L2Block } from '../l2_block.js'; +import { type BlockBuilder } from './block-builder.js'; + +/** + * Coordinates the proving of an entire epoch. + * + * Expected usage: + * ``` + * startNewEpoch + * foreach block { + * addNewBlock + * foreach tx { + * addTx + * } + * setBlockCompleted + * } + * finaliseEpoch + * ``` + */ +export interface EpochProver extends BlockBuilder { + /** + * Starts a new epoch. Must be the first method to be called. + * @param epochNumber - The epoch number. + * @param totalNumBlocks - The total number of blocks expected in the epoch (must be at least one). + **/ + startNewEpoch(epochNumber: number, totalNumBlocks: number): void; + + /** Pads the epoch with empty block roots if needed and blocks until proven. Throws if proving has failed. */ + finaliseEpoch(): Promise<{ publicInputs: RootRollupPublicInputs; proof: Proof }>; + + /** Cancels all proving jobs. */ + cancel(): void; + + /** Returns an identifier for the prover or zero if not set. */ + getProverId(): Fr; + + /** Returns the block assembled at a given index (zero-based) within the epoch. */ + getBlock(index: number): L2Block; +} diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 63a79b5a4be..89287fa7289 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -1,10 +1,12 @@ export * from './aztec-node.js'; -export * from './block-prover.js'; +export * from './block-builder.js'; export * from './configs.js'; +export * from './epoch-prover.js'; export * from './l2_block_number.js'; export * from './merkle_tree_operations.js'; export * from './nullifier_tree.js'; export * from './private_kernel_prover.js'; +export * from './processed-tx-handler.js'; export * from './prover-client.js'; export * from './prover-coordination.js'; export * from './proving-job.js'; diff --git a/yarn-project/circuit-types/src/interfaces/processed-tx-handler.ts b/yarn-project/circuit-types/src/interfaces/processed-tx-handler.ts new file mode 100644 index 00000000000..2793b35bbef --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/processed-tx-handler.ts @@ -0,0 +1,10 @@ +import { type ProcessedTx } from '../tx/processed_tx.js'; + +/** Receives processed txs as part of block simulation or proving. */ +export interface ProcessedTxHandler { + /** + * Handles a processed txs. + * @param tx - The transaction to be handled. 
+ */ + addNewTx(tx: ProcessedTx): Promise; +} diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index e7648c0ada8..8f3d9bb7a69 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -2,7 +2,7 @@ import { type TxHash } from '@aztec/circuit-types'; import { Fr } from '@aztec/circuits.js'; import { type ConfigMappingsType, booleanConfigHelper, numberConfigHelper } from '@aztec/foundation/config'; -import { type EpochProver } from './block-prover.js'; +import { type EpochProver } from './epoch-prover.js'; import { type MerkleTreeOperations } from './merkle_tree_operations.js'; import { type ProvingJobSource } from './proving-job.js'; diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts index bee165bc03e..42ac7ae3dda 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -78,7 +78,6 @@ export enum ProvingRequestType { MERGE_ROLLUP, EMPTY_BLOCK_ROOT_ROLLUP, BLOCK_ROOT_ROLLUP, - BLOCK_ROOT_ROLLUP_FINAL, BLOCK_MERGE_ROLLUP, ROOT_ROLLUP, @@ -108,8 +107,6 @@ export function mapProvingRequestTypeToCircuitName(type: ProvingRequestType): Ci return 'empty-block-root-rollup'; case ProvingRequestType.BLOCK_ROOT_ROLLUP: return 'block-root-rollup'; - case ProvingRequestType.BLOCK_ROOT_ROLLUP_FINAL: - return 'block-root-rollup-final'; case ProvingRequestType.BLOCK_MERGE_ROLLUP: return 'block-merge-rollup'; case ProvingRequestType.ROOT_ROLLUP: @@ -172,10 +169,6 @@ export type ProvingRequest = type: ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP; inputs: EmptyBlockRootRollupInputs; } - | { - type: ProvingRequestType.BLOCK_ROOT_ROLLUP_FINAL; - inputs: BlockRootRollupInputs; - } | { type: ProvingRequestType.BLOCK_MERGE_ROLLUP; inputs: BlockMergeRollupInputs; @@ -205,7 +198,6 @@ export type ProvingRequestPublicInputs = { [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BLOCK_ROOT_ROLLUP_FINAL]: PublicInputsAndRecursiveProof; [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; diff --git a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts index 32761bb9169..e59595a1784 100644 --- a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts +++ b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts @@ -96,16 +96,6 @@ export interface ServerCircuitProver { epochNumber?: number, ): Promise>; - /** - * Creates a proof for the given input. - * @param input - Input to the circuit. - */ - getBlockRootRollupFinalProof( - input: BlockRootRollupInputs, - signal?: AbortSignal, - epochNumber?: number, - ): Promise>; - /** * Creates a proof for the given input. * @param input - Input to the circuit. 
diff --git a/yarn-project/circuit-types/src/stats/stats.ts b/yarn-project/circuit-types/src/stats/stats.ts index 5e1bc0568e7..def250c0495 100644 --- a/yarn-project/circuit-types/src/stats/stats.ts +++ b/yarn-project/circuit-types/src/stats/stats.ts @@ -78,7 +78,6 @@ export type CircuitName = | 'merge-rollup' | 'block-root-rollup' | 'empty-block-root-rollup' - | 'block-root-rollup-final' | 'block-merge-rollup' | 'root-rollup' | 'private-kernel-init' diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 8888fa96775..5e60d76ff69 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -80,7 +80,6 @@ export const BLOCK_ROOT_ROLLUP_INDEX = 22; export const BLOCK_MERGE_ROLLUP_INDEX = 23; export const ROOT_ROLLUP_INDEX = 24; export const BLOCK_ROOT_ROLLUP_EMPTY_INDEX = 25; -export const BLOCK_ROOT_ROLLUP_FINAL_INDEX = 26; export const FUNCTION_SELECTOR_NUM_BYTES = 4; export const INITIALIZATION_SLOT_SEPARATOR = 1000000000; export const INITIAL_L2_BLOCK_NUM = 1; diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts index 675905d1b8a..f2a9f8b8a6a 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts @@ -47,7 +47,6 @@ export async function deployUltraHonkVerifier( // but also has a reference to L1 artifacts and bb-prover. const setupVerifier = async ( artifact: Parameters<(typeof verifier)['generateSolidityContract']>[0], // Cannot properly import the type here due to the hack above - method: 'setBlockVerifier' | 'setEpochVerifier', ) => { const contract = await verifier.generateSolidityContract(artifact, 'UltraHonkVerifier.sol'); log(`Generated UltraHonkVerifier contract for ${artifact}`); @@ -55,12 +54,11 @@ export async function deployUltraHonkVerifier( log(`Compiled UltraHonkVerifier contract for ${artifact}`); const { address: verifierAddress } = await deployL1Contract(walletClient, publicClient, abi, bytecode); log(`Deployed real ${artifact} verifier at ${verifierAddress}`); - await rollup.write[method]([verifierAddress.toString()]); + await rollup.write.setEpochVerifier([verifierAddress.toString()]); log(`Set ${artifact} verifier in ${rollup.address} rollup contract to ${verifierAddress}`); }; - await setupVerifier('BlockRootRollupFinalArtifact', 'setBlockVerifier'); - await setupVerifier('RootRollupArtifact', 'setEpochVerifier'); + await setupVerifier('RootRollupArtifact'); log(`Rollup accepts only real proofs now`); } @@ -98,7 +96,6 @@ export async function deployMockVerifier( client: walletClient, }); - await rollup.write.setBlockVerifier([mockVerifierAddress.toString()]); await rollup.write.setEpochVerifier([mockVerifierAddress.toString()]); log(`Rollup accepts only fake proofs now`); } diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index f1569b872b0..691d0c3aae6 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -11,8 +11,7 @@ import { } from '@aztec/aztec.js'; // eslint-disable-next-line no-restricted-imports import { - type BlockSimulator, - PROVING_STATUS, + type BlockBuilder, type ProcessedTx, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, makeProcessedTx, @@ -90,7 +89,7 @@ describe('L1Publisher 
integration', () => { let publisher: L1Publisher; - let builder: BlockSimulator; + let builder: BlockBuilder; let builderDb: MerkleTrees; // The header of the last block @@ -314,11 +313,11 @@ describe('L1Publisher integration', () => { }; const buildBlock = async (globalVariables: GlobalVariables, txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, l1ToL2Messages); + await builder.startNewBlock(txs.length, globalVariables, l1ToL2Messages); for (const tx of txs) { await builder.addNewTx(tx); } - return blockTicket; + return builder.setBlockCompleted(); }; it(`Build ${numberOfConsecutiveBlocks} blocks of 4 bloated txs building on each other`, async () => { @@ -365,11 +364,8 @@ describe('L1Publisher integration', () => { feeRecipient, GasFees.empty(), ); - const ticket = await buildBlock(globalVariables, txs, currentL1ToL2Messages); - const result = await ticket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const blockResult = await builder.finaliseBlock(); - const block = blockResult.block; + + const block = await buildBlock(globalVariables, txs, currentL1ToL2Messages); prevHeader = block.header; blockSource.getL1ToL2Messages.mockResolvedValueOnce(currentL1ToL2Messages); blockSource.getBlocks.mockResolvedValueOnce([block]); @@ -473,12 +469,7 @@ describe('L1Publisher integration', () => { feeRecipient, GasFees.empty(), ); - const blockTicket = await buildBlock(globalVariables, txs, l1ToL2Messages); - await builder.setBlockCompleted(); - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const blockResult = await builder.finaliseBlock(); - const block = blockResult.block; + const block = await buildBlock(globalVariables, txs, l1ToL2Messages); prevHeader = block.header; blockSource.getL1ToL2Messages.mockResolvedValueOnce(l1ToL2Messages); blockSource.getBlocks.mockResolvedValueOnce([block]); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index 7475e1d707c..9431cf0da32 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -380,20 +380,16 @@ export class FullProverTest { // REFACTOR: Extract this method to a common package. We need a package that deals with L1 // but also has a reference to L1 artifacts and bb-prover. 
- const setupVerifier = async ( - artifact: UltraKeccakHonkProtocolArtifact, - method: 'setBlockVerifier' | 'setEpochVerifier', - ) => { + const setupVerifier = async (artifact: UltraKeccakHonkProtocolArtifact) => { const contract = await verifier.generateSolidityContract(artifact, 'UltraHonkVerifier.sol'); const { abi, bytecode } = compileContract('UltraHonkVerifier.sol', 'HonkVerifier', contract, solc); const { address: verifierAddress } = await deployL1Contract(walletClient, publicClient, abi, bytecode); this.logger.info(`Deployed real ${artifact} verifier at ${verifierAddress}`); - await rollup.write[method]([verifierAddress.toString()]); + await rollup.write.setEpochVerifier([verifierAddress.toString()]); }; - await setupVerifier('BlockRootRollupFinalArtifact', 'setBlockVerifier'); - await setupVerifier('RootRollupArtifact', 'setEpochVerifier'); + await setupVerifier('RootRollupArtifact'); this.logger.info('Rollup only accepts valid proofs now'); } diff --git a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts index fa5ebf76a70..c4f5c952173 100644 --- a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts +++ b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts @@ -37,7 +37,6 @@ import BaseRollupSimulatedJson from '../artifacts/rollup_base_simulated.json' as import BlockMergeRollupJson from '../artifacts/rollup_block_merge.json' assert { type: 'json' }; import BlockRootRollupJson from '../artifacts/rollup_block_root.json' assert { type: 'json' }; import EmptyBlockRootRollupJson from '../artifacts/rollup_block_root_empty.json' assert { type: 'json' }; -import BlockRootRollupFinalJson from '../artifacts/rollup_block_root_final.json' assert { type: 'json' }; import MergeRollupJson from '../artifacts/rollup_merge.json' assert { type: 'json' }; import RootRollupJson from '../artifacts/rollup_root.json' assert { type: 'json' }; @@ -70,7 +69,6 @@ export type ServerProtocolArtifact = | 'MergeRollupArtifact' | 'BlockRootRollupArtifact' | 'EmptyBlockRootRollupArtifact' - | 'BlockRootRollupFinalArtifact' // TODO(palla/prover): Delete this artifact | 'BlockMergeRollupArtifact' | 'RootRollupArtifact'; @@ -97,7 +95,6 @@ export const ServerCircuitArtifacts: Record = { @@ -114,7 +111,6 @@ export const SimulatedServerCircuitArtifacts: Record = { diff --git a/yarn-project/noir-protocol-circuits-types/src/vks.ts b/yarn-project/noir-protocol-circuits-types/src/vks.ts index 864fc967197..c140d10fd2b 100644 --- a/yarn-project/noir-protocol-circuits-types/src/vks.ts +++ b/yarn-project/noir-protocol-circuits-types/src/vks.ts @@ -3,7 +3,6 @@ import { BASE_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX, BLOCK_ROOT_ROLLUP_EMPTY_INDEX, - BLOCK_ROOT_ROLLUP_FINAL_INDEX, BLOCK_ROOT_ROLLUP_INDEX, EMPTY_NESTED_INDEX, Fr, @@ -55,7 +54,6 @@ import BaseRollupVkJson from '../artifacts/keys/rollup_base.vk.data.json' assert import BlockMergeRollupVkJson from '../artifacts/keys/rollup_block_merge.vk.data.json' assert { type: 'json' }; import BlockRootRollupVkJson from '../artifacts/keys/rollup_block_root.vk.data.json' assert { type: 'json' }; import EmptyBlockRootRollupVkJson from '../artifacts/keys/rollup_block_root_empty.vk.data.json' assert { type: 'json' }; -import BlockRootRollupFinalVkJson from '../artifacts/keys/rollup_block_root_final.vk.data.json' assert { type: 'json' }; import MergeRollupVkJson from '../artifacts/keys/rollup_merge.vk.data.json' assert { type: 'json' }; import RootRollupVkJson from 
'../artifacts/keys/rollup_root.vk.data.json' assert { type: 'json' }; import { type ClientProtocolArtifact, type ProtocolArtifact, type ServerProtocolArtifact } from './artifacts.js'; @@ -92,7 +90,6 @@ const ServerCircuitVks: Record = { MergeRollupArtifact: keyJsonToVKData(MergeRollupVkJson), BlockRootRollupArtifact: keyJsonToVKData(BlockRootRollupVkJson), EmptyBlockRootRollupArtifact: keyJsonToVKData(EmptyBlockRootRollupVkJson), - BlockRootRollupFinalArtifact: keyJsonToVKData(BlockRootRollupFinalVkJson), BlockMergeRollupArtifact: keyJsonToVKData(BlockMergeRollupVkJson), RootRollupArtifact: keyJsonToVKData(RootRollupVkJson), }; @@ -139,7 +136,6 @@ export const ProtocolCircuitVkIndexes: Record = { BlockMergeRollupArtifact: BLOCK_MERGE_ROLLUP_INDEX, RootRollupArtifact: ROOT_ROLLUP_INDEX, EmptyBlockRootRollupArtifact: BLOCK_ROOT_ROLLUP_EMPTY_INDEX, - BlockRootRollupFinalArtifact: BLOCK_ROOT_ROLLUP_FINAL_INDEX, }; function buildVKTree() { diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 8ccd8bba506..84732a4c12b 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -1,8 +1,8 @@ import { type BBProverConfig } from '@aztec/bb-prover'; import { - type BlockProver, type MerkleTreeAdminOperations, type ProcessedTx, + type ProcessedTxHandler, type PublicExecutionRequest, type ServerCircuitProver, type Tx, @@ -53,7 +53,7 @@ export class TestContext { public logger: DebugLogger, ) {} - public get blockProver() { + public get epochProver() { return this.orchestrator; } @@ -150,7 +150,7 @@ export class TestContext { public async processPublicFunctions( txs: Tx[], maxTransactions: number, - blockProver?: BlockProver, + txHandler?: ProcessedTxHandler, txValidator?: TxValidator, ) { const defaultExecutorImplementation = ( @@ -182,7 +182,7 @@ export class TestContext { return await this.processPublicFunctionsWithMockExecutorImplementation( txs, maxTransactions, - blockProver, + txHandler, txValidator, defaultExecutorImplementation, ); @@ -191,7 +191,7 @@ export class TestContext { public async processPublicFunctionsWithMockExecutorImplementation( txs: Tx[], maxTransactions: number, - blockProver?: BlockProver, + txHandler?: ProcessedTxHandler, txValidator?: TxValidator, executorMock?: ( execution: PublicExecutionRequest, @@ -206,6 +206,6 @@ export class TestContext { if (executorMock) { this.publicExecutor.simulate.mockImplementation(executorMock); } - return await this.publicProcessor.process(txs, maxTransactions, blockProver, txValidator); + return await this.publicProcessor.process(txs, maxTransactions, txHandler, txValidator); } } diff --git a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts index f3c2519c831..c9d136f6b50 100644 --- a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts @@ -1,4 +1,4 @@ -import { type L2Block, type MerkleTreeId, type ProvingResult } from '@aztec/circuit-types'; +import { type L2Block, type MerkleTreeId } from '@aztec/circuit-types'; import { type ARCHIVE_HEIGHT, type AppendOnlyTreeSnapshot, @@ -18,14 +18,9 @@ import { } from '@aztec/circuits.js'; import { type Tuple } from '@aztec/foundation/serialize'; +import { type EpochProvingState } from './epoch-proving-state.js'; import { type TxProvingState } from './tx-proving-state.js'; -enum 
PROVING_STATE_LIFECYCLE { - PROVING_STATE_CREATED, - PROVING_STATE_RESOLVED, - PROVING_STATE_REJECTED, -} - export type MergeRollupInputData = { inputs: [BaseOrMergeRollupPublicInputs | undefined, BaseOrMergeRollupPublicInputs | undefined]; proofs: [ @@ -50,8 +45,6 @@ export class BlockProvingState { public block: L2Block | undefined; private txs: TxProvingState[] = []; - private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; - constructor( public readonly index: number, public readonly totalNumTxs: number, @@ -63,8 +56,7 @@ export class BlockProvingState { public readonly archiveTreeSnapshot: AppendOnlyTreeSnapshot, public readonly archiveTreeRootSiblingPath: Tuple, public readonly previousBlockHash: Fr, - private completionCallback?: (result: ProvingResult) => void, - private rejectionCallback?: (reason: string) => void, + private readonly parentEpoch: EpochProvingState, ) { this.rootParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined); } @@ -206,35 +198,15 @@ export class BlockProvingState { // Returns true if we are still able to accept transactions, false otherwise public isAcceptingTransactions() { - return ( - this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED && this.totalNumTxs > this.txs.length - ); + return this.totalNumTxs > this.txs.length; } - // Returns true if this proving state is still valid, false otherwise + // Returns whether the proving state is still valid public verifyState() { - return this.provingStateLifecycle === PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED; + return this.parentEpoch.verifyState(); } - // Attempts to reject the proving state promise with the given reason public reject(reason: string) { - if (!this.verifyState()) { - return; - } - this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED; - if (this.rejectionCallback) { - this.rejectionCallback(reason); - } - } - - // Attempts to resolve the proving state promise with the given result - public resolve(result: ProvingResult) { - if (!this.verifyState()) { - return; - } - this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_RESOLVED; - if (this.completionCallback) { - this.completionCallback(result); - } + this.parentEpoch.reject(reason); } } diff --git a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts index 03d32a5e392..a13a8d600dc 100644 --- a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts @@ -1,4 +1,4 @@ -import { type MerkleTreeId, type ProvingResult } from '@aztec/circuit-types'; +import { type MerkleTreeId } from '@aztec/circuit-types'; import { type ARCHIVE_HEIGHT, type AppendOnlyTreeSnapshot, @@ -36,6 +36,8 @@ export type BlockMergeRollupInputData = { verificationKeys: [VerificationKeyAsFields | undefined, VerificationKeyAsFields | undefined]; }; +export type ProvingResult = { status: 'success' } | { status: 'failure'; reason: string }; + /** * The current state of the proving schedule for an epoch. * Contains the raw inputs and intermediate state to generate every constituent proof in the tree. @@ -55,8 +57,6 @@ export class EpochProvingState { public readonly totalNumBlocks: number, private completionCallback: (result: ProvingResult) => void, private rejectionCallback: (reason: string) => void, - /** Whether to prove the epoch. Temporary while we still care about proving blocks. 
*/ - public readonly proveEpoch: boolean, ) {} /** Returns the current block proving state */ @@ -110,8 +110,6 @@ export class EpochProvingState { archiveTreeSnapshot: AppendOnlyTreeSnapshot, archiveTreeRootSiblingPath: Tuple, previousBlockHash: Fr, - completionCallback?: (result: ProvingResult) => void, - rejectionCallback?: (reason: string) => void, ) { const block = new BlockProvingState( this.blocks.length, @@ -124,15 +122,7 @@ export class EpochProvingState { archiveTreeSnapshot, archiveTreeRootSiblingPath, previousBlockHash, - completionCallback, - reason => { - // Reject the block - if (rejectionCallback) { - rejectionCallback(reason); - } - // An error on any block rejects this whole epoch - this.reject(reason); - }, + this, ); this.blocks.push(block); if (this.blocks.length === this.totalNumBlocks) { @@ -217,10 +207,6 @@ export class EpochProvingState { } this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_REJECTED; this.rejectionCallback(reason); - - for (const block of this.blocks) { - block.reject('Proving cancelled'); - } } // Attempts to resolve the proving state promise with the given result diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 11a28c16e7a..38978fab21c 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -1,20 +1,14 @@ import { - BlockProofError, Body, EncryptedNoteTxL2Logs, EncryptedTxL2Logs, L2Block, MerkleTreeId, - PROVING_STATUS, type PaddingProcessedTx, type ProcessedTx, - type ProvingBlockResult, ProvingRequestType, - type ProvingResult, - type ProvingTicket, type PublicInputsAndRecursiveProof, type ServerCircuitProver, - Tx, type TxEffect, UnencryptedTxL2Logs, makeEmptyProcessedTx, @@ -84,7 +78,12 @@ import { validateTx, } from './block-building-helpers.js'; import { type BlockProvingState, type MergeRollupInputData } from './block-proving-state.js'; -import { type BlockMergeRollupInputData, EpochProvingState, type TreeSnapshots } from './epoch-proving-state.js'; +import { + type BlockMergeRollupInputData, + EpochProvingState, + type ProvingResult, + type TreeSnapshots, +} from './epoch-proving-state.js'; import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js'; import { TX_PROVING_CODE, type TxProvingInstruction, TxProvingState } from './tx-proving-state.js'; @@ -109,6 +108,7 @@ export class ProvingOrchestrator implements EpochProver { private pendingProvingJobs: AbortController[] = []; private paddingTx: PaddingProcessedTx | undefined = undefined; + private provingPromise: Promise | undefined = undefined; private metrics: ProvingOrchestratorMetrics; constructor( @@ -135,20 +135,20 @@ export class ProvingOrchestrator implements EpochProver { this.paddingTx = undefined; } - public startNewEpoch(epochNumber: number, totalNumBlocks: number, proveEpoch = true): ProvingTicket { + public startNewEpoch(epochNumber: number, totalNumBlocks: number) { const { promise: _promise, resolve, reject } = promiseWithResolvers(); - const promise = _promise.catch((reason): ProvingResult => ({ status: PROVING_STATUS.FAILURE, reason })); - if (totalNumBlocks <= 0) { - throw new Error(`Invalid number of blocks for epoch: ${totalNumBlocks}`); + const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason })); + if (totalNumBlocks <= 0 || !Number.isInteger(totalNumBlocks)) { + throw new Error(`Invalid number of blocks for epoch (got 
${totalNumBlocks})`); } logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`); - this.provingState = new EpochProvingState(epochNumber, totalNumBlocks, resolve, reject, proveEpoch); - return { provingPromise: promise }; + this.provingState = new EpochProvingState(epochNumber, totalNumBlocks, resolve, reject); + this.provingPromise = promise; } /** * Starts off a new block - * @param numTxs - The total number of transactions in the block. Must be a power of 2 + * @param numTxs - The total number of transactions in the block. * @param globalVariables - The global variables for the block * @param l1ToL2Messages - The l1 to l2 messages for the block * @param verificationKeys - The private kernel verification keys @@ -158,15 +158,9 @@ export class ProvingOrchestrator implements EpochProver { [Attributes.BLOCK_SIZE]: numTxs, [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(), })) - public async startNewBlock( - numTxs: number, - globalVariables: GlobalVariables, - l1ToL2Messages: Fr[], - ): Promise { - // If no proving state, assume we only care about proving this block and initialize a 1-block epoch - // TODO(palla/prover): Remove this flow once we drop block-only proving + public async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) { if (!this.provingState) { - this.startNewEpoch(globalVariables.blockNumber.toNumber(), 1, false); + throw new Error(`Invalid proving state, call startNewEpoch before starting a block`); } if (!this.provingState?.isAcceptingBlocks()) { @@ -174,10 +168,14 @@ export class ProvingOrchestrator implements EpochProver { } if (!Number.isInteger(numTxs) || numTxs < 2) { - throw new Error(`Length of txs for the block should be at least two (got ${numTxs})`); + throw new Error(`Invalid number of txs for block (got ${numTxs})`); } - // TODO(palla/prover-node): Store block number in the db itself to make this check more reliable, + if (this.provingState.currentBlock && !this.provingState.currentBlock.block) { + throw new Error(`Must end previous block before starting a new one`); + } + + // TODO(palla/prover): Store block number in the db itself to make this check more reliable, // and turn this warning into an exception that we throw. 
const { blockNumber } = globalVariables; const dbBlockNumber = (await this.db.getTreeInfo(MerkleTreeId.ARCHIVE)).size - 1n; @@ -230,9 +228,6 @@ export class ProvingOrchestrator implements EpochProver { BigInt(startArchiveSnapshot.nextAvailableLeafIndex - 1), ); - const { promise: _promise, resolve, reject } = promiseWithResolvers(); - const promise = _promise.catch((reason): ProvingResult => ({ status: PROVING_STATUS.FAILURE, reason })); - this.provingState!.startNewBlock( numTxs, globalVariables, @@ -243,16 +238,12 @@ export class ProvingOrchestrator implements EpochProver { startArchiveSnapshot, newArchiveSiblingPath, previousBlockHash!, - resolve, - reject, ); // Enqueue base parity circuits for the block for (let i = 0; i < baseParityInputs.length; i++) { this.enqueueBaseParityCircuit(this.provingState!.currentBlock!, baseParityInputs[i], i); } - - return { provingPromise: promise }; } /** @@ -272,6 +263,10 @@ export class ProvingOrchestrator implements EpochProver { throw new Error(`Rollup not accepting further transactions`); } + if (!provingState.verifyState()) { + throw new Error(`Invalid proving state when adding a tx`); + } + validateTx(tx); logger.info(`Received transaction: ${tx.hash}`); @@ -285,10 +280,7 @@ export class ProvingOrchestrator implements EpochProver { this.enqueueFirstProofs(inputs, treeSnapshots, tx, provingState); if (provingState.transactionsReceived === provingState.totalNumTxs) { - logger.verbose( - `All transactions received for block ${provingState.globalVariables.blockNumber}. Assembling header.`, - ); - await this.buildBlockHeader(provingState); + logger.verbose(`All transactions received for block ${provingState.globalVariables.blockNumber}.`); } } @@ -307,53 +299,64 @@ export class ProvingOrchestrator implements EpochProver { [Attributes.BLOCK_TXS_COUNT]: block.transactionsReceived, }; }) - public async setBlockCompleted() { + public async setBlockCompleted(): Promise { const provingState = this.provingState?.currentBlock; if (!provingState) { throw new Error(`Invalid proving state, call startNewBlock before adding transactions or completing the block`); } - // we may need to pad the rollup with empty transactions + // We may need to pad the rollup with empty transactions const paddingTxCount = provingState.totalNumTxs - provingState.transactionsReceived; - if (paddingTxCount === 0) { - return; - } else if (provingState.totalNumTxs > 2) { + if (paddingTxCount > 0 && provingState.totalNumTxs > 2) { throw new Error(`Block not ready for completion: expecting ${paddingTxCount} more transactions.`); } - logger.debug(`Padding rollup with ${paddingTxCount} empty transactions`); - // Make an empty padding transaction - // Required for: - // 0 (when we want an empty block, largely for testing), or - // 1 (we need to pad with one tx as all rollup circuits require a pair of inputs) txs - // Insert it into the tree the required number of times to get all of the - // base rollup inputs - // Then enqueue the proving of all the transactions - const unprovenPaddingTx = makeEmptyProcessedTx( - this.db.getInitialHeader(), - provingState.globalVariables.chainId, - provingState.globalVariables.version, - getVKTreeRoot(), - ); - const txInputs: Array<{ inputs: BaseRollupInputs; snapshot: TreeSnapshots }> = []; - for (let i = 0; i < paddingTxCount; i++) { - const [inputs, snapshot] = await this.prepareTransaction(unprovenPaddingTx, provingState); - const txInput = { - inputs, - snapshot, - }; - txInputs.push(txInput); - } + if (paddingTxCount > 0) { + logger.debug(`Padding 
rollup with ${paddingTxCount} empty transactions`); + // Make an empty padding transaction + // Required for: + // 0 (when we want an empty block, largely for testing), or + // 1 (we need to pad with one tx as all rollup circuits require a pair of inputs) txs + // Insert it into the tree the required number of times to get all of the + // base rollup inputs + // Then enqueue the proving of all the transactions + const unprovenPaddingTx = makeEmptyProcessedTx( + this.db.getInitialHeader(), + provingState.globalVariables.chainId, + provingState.globalVariables.version, + getVKTreeRoot(), + ); + const txInputs: Array<{ inputs: BaseRollupInputs; snapshot: TreeSnapshots }> = []; + for (let i = 0; i < paddingTxCount; i++) { + const [inputs, snapshot] = await this.prepareTransaction(unprovenPaddingTx, provingState); + const txInput = { + inputs, + snapshot, + }; + txInputs.push(txInput); + } - // Now enqueue the proving - this.enqueuePaddingTxs(provingState, txInputs, unprovenPaddingTx); + // Now enqueue the proving + this.enqueuePaddingTxs(provingState, txInputs, unprovenPaddingTx); + } // And build the block header - logger.verbose(`Block ${provingState.globalVariables.blockNumber} padded with empty tx(s). Assembling header.`); - await this.buildBlockHeader(provingState); + logger.verbose(`Block ${provingState.globalVariables.blockNumber} completed. Assembling header.`); + await this.buildBlock(provingState); + + return provingState.block!; + } + + /** Returns the block as built for a given index. */ + public getBlock(index: number): L2Block { + const block = this.provingState?.blocks[index].block; + if (!block) { + throw new Error(`Block at index ${index} not available`); + } + return block; } - @trackSpan('ProvingOrchestrator.setEpochCompleted', function () { + @trackSpan('ProvingOrchestrator.padEpoch', function () { if (!this.provingState) { return {}; } @@ -362,15 +365,11 @@ export class ProvingOrchestrator implements EpochProver { [Attributes.EPOCH_SIZE]: this.provingState.totalNumBlocks, }; }) - public setEpochCompleted() { - const provingState = this.provingState; - if (!provingState) { - throw new Error(`Invalid proving state, call startNewEpoch first`); - } - + private padEpoch() { + const provingState = this.provingState!; const lastBlock = provingState.currentBlock?.block; if (!lastBlock) { - throw new Error(`Epoch needs at least one completed block in order to be marked as completed`); + throw new Error(`Epoch needs at least one completed block in order to be padded`); } const paddingBlockCount = Math.max(2, provingState.totalNumBlocks) - provingState.blocks.length; @@ -416,7 +415,7 @@ export class ProvingOrchestrator implements EpochProver { ); } - private async buildBlockHeader(provingState: BlockProvingState) { + private async buildBlock(provingState: BlockProvingState) { // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body const gasFees = provingState.globalVariables.gasFees; const nonEmptyTxEffects: TxEffect[] = provingState!.allTxs @@ -558,42 +557,22 @@ export class ProvingOrchestrator implements EpochProver { } /** - * Returns the fully proven block. Requires proving to have been completed. - * @param index - The index of the block to finalise. Defaults to the last block. - * @returns The fully proven block and proof. + * Returns the proof for the current epoch. */ - public finaliseBlock(index?: number) { - try { - const block = this.provingState?.blocks[index ?? 
this.provingState?.blocks.length - 1]; - - if (!block || !block.blockRootRollupPublicInputs || !block.finalProof || !block.block) { - throw new Error(`Invalid proving state, a block must be proven before it can be finalised`); - } + public async finaliseEpoch() { + if (!this.provingState || !this.provingPromise) { + throw new Error(`Invalid proving state, an epoch must be proven before it can be finalised`); + } - const blockResult: ProvingBlockResult = { - proof: block.finalProof, - aggregationObject: block.finalProof.extractAggregationObject(), - block: block.block!, - }; + this.padEpoch(); - return Promise.resolve(blockResult); - } catch (err) { - throw new BlockProofError( - err && typeof err === 'object' && 'message' in err ? String(err.message) : String(err), - this.provingState?.blocks[index ?? this.provingState?.blocks.length - 1]?.allTxs.map(x => - Tx.getHash(x.processedTx), - ) ?? [], - ); + const result = await this.provingPromise!; + if (result.status === 'failure') { + throw new Error(`Epoch proving failed: ${result.reason}`); } - } - /** - * Returns the proof for the current epoch. - * Requires proving to have been completed. - */ - public finaliseEpoch() { - if (!this.provingState || !this.provingState.rootRollupPublicInputs || !this.provingState.finalProof) { - throw new Error(`Invalid proving state, an epoch must be proven before it can be finalised`); + if (!this.provingState.rootRollupPublicInputs || !this.provingState.finalProof) { + throw new Error(`Invalid proving state, missing root rollup public inputs or final proof`); } pushTestData('epochProofResult', { @@ -924,8 +903,6 @@ export class ProvingOrchestrator implements EpochProver { proverId: this.proverId, }); - const shouldProveEpoch = this.provingState!.proveEpoch; - this.deferredProving( provingState, wrapCallbackInSpan( @@ -935,10 +912,7 @@ export class ProvingOrchestrator implements EpochProver { [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-root-rollup' satisfies CircuitName, }, - signal => - shouldProveEpoch - ? 
this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber) - : this.prover.getBlockRootRollupFinalProof(inputs, signal, provingState.epochNumber), + signal => this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber), ), result => { const header = this.extractBlockHeaderFromPublicInputs(provingState, result.inputs); @@ -951,17 +925,10 @@ export class ProvingOrchestrator implements EpochProver { provingState.blockRootRollupPublicInputs = result.inputs; provingState.finalProof = result.proof.binaryProof; - provingState.resolve({ status: PROVING_STATUS.SUCCESS }); logger.debug(`Completed proof for block root rollup for ${provingState.block?.number}`); // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover) - // TODO(palla/prover): Remove this once we've dropped the flow for proving single blocks - if (!shouldProveEpoch) { - logger.verbose(`Skipping epoch rollup, only one block in epoch`); - return; - } - const currentLevel = this.provingState!.numMergeLevels + 1n; this.storeAndExecuteNextBlockMergeLevel(this.provingState!, currentLevel, BigInt(provingState.index), [ result.inputs, @@ -1091,7 +1058,7 @@ export class ProvingOrchestrator implements EpochProver { logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`); provingState.rootRollupPublicInputs = result.inputs; provingState.finalProof = result.proof.binaryProof; - provingState.resolve({ status: PROVING_STATUS.SUCCESS }); + provingState.resolve({ status: 'success' }); }, ); } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts index 01bdd400643..dd21cbd2dd2 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts @@ -1,4 +1,3 @@ -import { PROVING_STATUS } from '@aztec/circuit-types'; import { Fr } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -29,7 +28,8 @@ describe('prover/orchestrator/errors', () => { makeBloatedProcessedTx(context.actualDb, 4), ]; - const blockTicket = await context.orchestrator.startNewBlock(txs.length, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(txs.length, context.globalVariables, []); for (const tx of txs) { await context.orchestrator.addNewTx(tx); @@ -39,11 +39,9 @@ describe('prover/orchestrator/errors', () => { async () => await context.orchestrator.addNewTx(makeEmptyProcessedTestTx(context.actualDb)), ).rejects.toThrow('Rollup not accepting further transactions'); - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await context.orchestrator.setBlockCompleted(); + expect(block.number).toEqual(context.blockNumber); + await context.orchestrator.finaliseEpoch(); }); it('throws if adding too many blocks', async () => { @@ -56,25 +54,28 @@ describe('prover/orchestrator/errors', () => { ).rejects.toThrow('Epoch not accepting further blocks'); }); - it('throws if adding a transaction before start', async () => { + it('throws if adding a transaction before starting epoch', async () => { + await expect( + async () => await 
context.orchestrator.addNewTx(makeEmptyProcessedTestTx(context.actualDb)), + ).rejects.toThrow(`Invalid proving state, call startNewBlock before adding transactions`); + }); + + it('throws if adding a transaction before starting block', async () => { + context.orchestrator.startNewEpoch(1, 1); await expect( async () => await context.orchestrator.addNewTx(makeEmptyProcessedTestTx(context.actualDb)), ).rejects.toThrow(`Invalid proving state, call startNewBlock before adding transactions`); }); it('throws if completing a block before start', async () => { + context.orchestrator.startNewEpoch(1, 1); await expect(async () => await context.orchestrator.setBlockCompleted()).rejects.toThrow( 'Invalid proving state, call startNewBlock before adding transactions or completing the block', ); }); - it('throws if finalising an incomplete block', async () => { - await expect(async () => await context.orchestrator.finaliseBlock()).rejects.toThrow( - 'Invalid proving state, a block must be proven before it can be finalised', - ); - }); - - it('throws if setting an incomplete block completed', async () => { + it('throws if setting an incomplete block as completed', async () => { + context.orchestrator.startNewEpoch(1, 1); await context.orchestrator.startNewBlock(3, context.globalVariables, []); await expect(async () => await context.orchestrator.setBlockCompleted()).rejects.toThrow( `Block not ready for completion: expecting ${3} more transactions.`, @@ -82,27 +83,35 @@ describe('prover/orchestrator/errors', () => { }); it('throws if adding to a cancelled block', async () => { + context.orchestrator.startNewEpoch(1, 1); await context.orchestrator.startNewBlock(2, context.globalVariables, []); - context.orchestrator.cancel(); await expect( async () => await context.orchestrator.addNewTx(makeEmptyProcessedTestTx(context.actualDb)), - ).rejects.toThrow('Rollup not accepting further transactions'); + ).rejects.toThrow('Invalid proving state when adding a tx'); }); it.each([[-4], [0], [1], [8.1]] as const)( 'fails to start a block with %i transactions', async (blockSize: number) => { + context.orchestrator.startNewEpoch(1, 1); await expect( async () => await context.orchestrator.startNewBlock(blockSize, context.globalVariables, []), - ).rejects.toThrow(`Length of txs for the block should be at least two (got ${blockSize})`); + ).rejects.toThrow(`Invalid number of txs for block (got ${blockSize})`); }, ); + it.each([[-4], [0], [8.1]] as const)('fails to start an epoch with %i blocks', (epochSize: number) => { + context.orchestrator.startNewEpoch(1, 1); + expect(() => context.orchestrator.startNewEpoch(1, epochSize)).toThrow( + `Invalid number of blocks for epoch (got ${epochSize})`, + ); + }); + it('rejects if too many l1 to l2 messages are provided', async () => { - // Assemble a fake transaction const l1ToL2Messages = new Array(100).fill(new Fr(0n)); + context.orchestrator.startNewEpoch(1, 1); await expect( async () => await context.orchestrator.startNewBlock(2, context.globalVariables, l1ToL2Messages), ).rejects.toThrow('Too many L1 to L2 messages'); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index a3bd36fe359..ac4d2a5710a 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -1,4 +1,4 @@ -import { PROVING_STATUS, type ServerCircuitProver } from '@aztec/circuit-types'; +import 
{ type ServerCircuitProver } from '@aztec/circuit-types'; import { Fr } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -35,52 +35,23 @@ describe('prover/orchestrator/failures', () => { }); it.each([ - [ - 'Base Rollup Failed', - () => { - jest.spyOn(mockProver, 'getBaseRollupProof').mockRejectedValue('Base Rollup Failed'); - }, - ], - [ - 'Merge Rollup Failed', - () => { - jest.spyOn(mockProver, 'getMergeRollupProof').mockRejectedValue('Merge Rollup Failed'); - }, - ], + ['Base Rollup Failed', (msg: string) => jest.spyOn(mockProver, 'getBaseRollupProof').mockRejectedValue(msg)], + ['Merge Rollup Failed', (msg: string) => jest.spyOn(mockProver, 'getMergeRollupProof').mockRejectedValue(msg)], [ 'Block Root Rollup Failed', - () => { - jest.spyOn(mockProver, 'getBlockRootRollupProof').mockRejectedValue('Block Root Rollup Failed'); - }, + (msg: string) => jest.spyOn(mockProver, 'getBlockRootRollupProof').mockRejectedValue(msg), ], [ 'Block Merge Rollup Failed', - () => { - jest.spyOn(mockProver, 'getBlockMergeRollupProof').mockRejectedValue('Block Merge Rollup Failed'); - }, - ], - [ - 'Root Rollup Failed', - () => { - jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); - }, - ], - [ - 'Base Parity Failed', - () => { - jest.spyOn(mockProver, 'getBaseParityProof').mockRejectedValue('Base Parity Failed'); - }, - ], - [ - 'Root Parity Failed', - () => { - jest.spyOn(mockProver, 'getRootParityProof').mockRejectedValue('Root Parity Failed'); - }, + (msg: string) => jest.spyOn(mockProver, 'getBlockMergeRollupProof').mockRejectedValue(msg), ], - ] as const)('handles a %s error', async (message: string, fn: () => void) => { - fn(); + ['Root Rollup Failed', (msg: string) => jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue(msg)], + ['Base Parity Failed', (msg: string) => jest.spyOn(mockProver, 'getBaseParityProof').mockRejectedValue(msg)], + ['Root Parity Failed', (msg: string) => jest.spyOn(mockProver, 'getRootParityProof').mockRejectedValue(msg)], + ] as const)('handles a %s error', async (message: string, fn: (msg: string) => void) => { + fn(message); - const epochTicket = orchestrator.startNewEpoch(1, 3); + orchestrator.startNewEpoch(1, 3); // We need at least 3 blocks and 3 txs to ensure all circuits are used for (let i = 0; i < 3; i++) { @@ -90,9 +61,10 @@ describe('prover/orchestrator/failures', () => { for (const tx of txs) { await orchestrator.addNewTx(tx); } + await orchestrator.setBlockCompleted(); } - await expect(epochTicket.provingPromise).resolves.toEqual({ status: PROVING_STATUS.FAILURE, reason: message }); + await expect(() => orchestrator.finaliseEpoch()).rejects.toThrow(`Epoch proving failed: ${message}`); }); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts index 3570e0af591..ba76c3d0c23 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -37,6 +37,8 @@ describe('prover/orchestrator/lifecycle', () => { deferredPromises.push(deferred); return deferred.promise; }); + + orchestrator.startNewEpoch(1, 1); await orchestrator.startNewBlock(2, makeGlobalVariables(1), []); await sleep(1); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts 
b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts index 6c959bd9b43..d3ece0b6ba5 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -1,19 +1,23 @@ -import { PROVING_STATUS } from '@aztec/circuit-types'; +import { MerkleTreeId, type MerkleTreeOperations } from '@aztec/circuit-types'; import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; +import { times } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; +import { MerkleTrees } from '@aztec/world-state'; -import { makeBloatedProcessedTx } from '../mocks/fixtures.js'; +import { makeBloatedProcessedTx, updateExpectedTreesFromTxs } from '../mocks/fixtures.js'; import { TestContext } from '../mocks/test_context.js'; const logger = createDebugLogger('aztec:orchestrator-mixed-blocks'); describe('prover/orchestrator/mixed-blocks', () => { let context: TestContext; + let expectsDb: MerkleTreeOperations; beforeEach(async () => { context = await TestContext.new(logger); + expectsDb = await MerkleTrees.tmp().then(t => t.asLatest()); }); afterEach(async () => { @@ -30,19 +34,38 @@ describe('prover/orchestrator/mixed-blocks', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - const blockTicket = await context.orchestrator.startNewBlock(3, context.globalVariables, l1ToL2Messages); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(3, context.globalVariables, l1ToL2Messages); + for (const tx of txs) { + await context.orchestrator.addNewTx(tx); + } + + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); + }); + + it.each([2, 4, 5, 8] as const)('builds an L2 block with %i bloated txs', async (totalCount: number) => { + const txs = times(totalCount, (i: number) => makeBloatedProcessedTx(context.actualDb, i)); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); for (const tx of txs) { await context.orchestrator.addNewTx(tx); } - await context.orchestrator.setBlockCompleted(); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); + await updateExpectedTreesFromTxs(expectsDb, txs); + const noteHashTreeAfter = await context.actualDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); + expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); }); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts deleted file mode 100644 index 98c72da2d0c..00000000000 --- 
a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks_2.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { MerkleTreeId, PROVING_STATUS } from '@aztec/circuit-types'; -import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; -import { fr } from '@aztec/circuits.js/testing'; -import { range } from '@aztec/foundation/array'; -import { times } from '@aztec/foundation/collection'; -import { createDebugLogger } from '@aztec/foundation/log'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; - -import { makeBloatedProcessedTx, updateExpectedTreesFromTxs } from '../mocks/fixtures.js'; -import { TestContext } from '../mocks/test_context.js'; - -const logger = createDebugLogger('aztec:orchestrator-mixed-blocks-2'); - -describe('prover/orchestrator/mixed-blocks', () => { - let context: TestContext; - let expectsDb: MerkleTreeOperations; - - beforeEach(async () => { - context = await TestContext.new(logger); - expectsDb = await MerkleTrees.new(openTmpStore(), new NoopTelemetryClient()).then(t => t.asLatest()); - }); - - afterEach(async () => { - await context.cleanup(); - }); - - describe('blocks', () => { - it.each([2, 4, 5, 8] as const)('builds an L2 block with %i bloated txs', async (totalCount: number) => { - const txs = times(totalCount, (i: number) => makeBloatedProcessedTx(context.actualDb, i)); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const blockTicket = await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); - - for (const tx of txs) { - await context.orchestrator.addNewTx(tx); - } - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); - - await updateExpectedTreesFromTxs(expectsDb, txs); - const noteHashTreeAfter = await context.actualDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - - const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); - expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); - }); - }); -}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts index 206f562f2a9..4c2d48e4384 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts @@ -1,4 +1,4 @@ -import { PROVING_STATUS, mockTx } from '@aztec/circuit-types'; +import { mockTx } from '@aztec/circuit-types'; import { times } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; @@ -38,19 +38,17 @@ describe('prover/orchestrator/public-functions', () => { tx.data.constants.vkTreeRoot = getVKTreeRoot(); } - const blockTicket = await context.orchestrator.startNewBlock(numTransactions, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(numTransactions, context.globalVariables, []); - const [processed, failed] = await context.processPublicFunctions(txs, 
numTransactions, context.blockProver); + const [processed, failed] = await context.processPublicFunctions(txs, numTransactions, context.epochProver); expect(processed.length).toBe(numTransactions); expect(failed.length).toBe(0); - await context.orchestrator.setBlockCompleted(); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + expect(block.number).toEqual(context.blockNumber); }, ); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts index 533b8ecb62d..e4205dab158 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts @@ -1,4 +1,3 @@ -import { PROVING_STATUS } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; @@ -20,7 +19,7 @@ describe('prover/orchestrator/multi-block', () => { describe('multiple blocks', () => { it.each([1, 4, 5])('builds an epoch with %s blocks in sequence', async (numBlocks: number) => { - const provingTicket = context.orchestrator.startNewEpoch(1, numBlocks); + context.orchestrator.startNewEpoch(1, numBlocks); let header = context.actualDb.getInitialHeader(); for (let i = 0; i < numBlocks; i++) { @@ -33,29 +32,17 @@ describe('prover/orchestrator/multi-block', () => { const globals = makeGlobals(blockNum); // This will need to be a 2 tx block - const blockTicket = await context.orchestrator.startNewBlock(2, globals, []); + await context.orchestrator.startNewBlock(2, globals, []); await context.orchestrator.addNewTx(tx); // we need to complete the block as we have not added a full set of txs - await context.orchestrator.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(blockNum); - header = finalisedBlock.block.header; + const block = await context.orchestrator.setBlockCompleted(); + header = block!.header; } - logger.info('Setting epoch as completed'); - context.orchestrator.setEpochCompleted(); - - logger.info('Awaiting epoch ticket'); - const result = await provingTicket.provingPromise; - expect(result).toEqual({ status: PROVING_STATUS.SUCCESS }); - - const epoch = context.orchestrator.finaliseEpoch(); + logger.info('Finalising epoch'); + const epoch = await context.orchestrator.finaliseEpoch(); expect(epoch.publicInputs.endBlockNumber.toNumber()).toEqual(1000 + numBlocks - 1); expect(epoch.proof).toBeDefined(); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts index 505b2b94b57..68db5ce4cce 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts @@ -1,4 +1,4 @@ -import { PROVING_STATUS, PublicExecutionRequest, mockTx } from '@aztec/circuit-types'; +import { 
PublicExecutionRequest, mockTx } from '@aztec/circuit-types'; import { AztecAddress } from '@aztec/circuits.js'; import { makeCallContext } from '@aztec/circuits.js/testing'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -43,20 +43,16 @@ describe('prover/orchestrator/public-functions', () => { const [processed, _] = await context.processPublicFunctions([tx], 1, undefined); // This will need to be a 2 tx block - const blockTicket = await context.orchestrator.startNewBlock(2, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(2, context.globalVariables, []); for (const processedTx of processed) { await context.orchestrator.addNewTx(processedTx); } - // we need to complete the block as we have not added a full set of txs - await context.orchestrator.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); }, ); @@ -135,20 +131,16 @@ describe('prover/orchestrator/public-functions', () => { ); // This will need to be a 2 tx block - const blockTicket = await context.orchestrator.startNewBlock(2, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(2, context.globalVariables, []); for (const processedTx of processed) { await context.orchestrator.addNewTx(processedTx); } - // we need to complete the block as we have not added a full set of txs - await context.orchestrator.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); }); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts index ed0c077c5c4..3722f3bf97a 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts @@ -1,4 +1,3 @@ -import { PROVING_STATUS } from '@aztec/circuit-types'; import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { range } from '@aztec/foundation/array'; @@ -28,15 +27,12 @@ describe('prover/orchestrator/blocks', () => { describe('blocks', () => { it('builds an empty L2 block', async () => { - const blockTicket = await context.orchestrator.startNewBlock(2, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(2, context.globalVariables, []); - await context.orchestrator.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await 
context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); }); it('builds a block with 1 transaction', async () => { @@ -45,20 +41,16 @@ describe('prover/orchestrator/blocks', () => { await updateExpectedTreesFromTxs(expectsDb, txs); // This will need to be a 2 tx block - const blockTicket = await context.orchestrator.startNewBlock(2, context.globalVariables, []); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(2, context.globalVariables, []); for (const tx of txs) { await context.orchestrator.addNewTx(tx); } - // we need to complete the block as we have not added a full set of txs - await context.orchestrator.setBlockCompleted(); - - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); }); it('builds a block concurrently with transaction simulation', async () => { @@ -71,18 +63,17 @@ describe('prover/orchestrator/blocks', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - const blockTicket = await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); + context.orchestrator.startNewEpoch(1, 1); + await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); for (const tx of txs) { await context.orchestrator.addNewTx(tx); await sleep(1000); } - const result = await blockTicket.provingPromise; - expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const finalisedBlock = await context.orchestrator.finaliseBlock(); - - expect(finalisedBlock.block.number).toEqual(context.blockNumber); + const block = await context.orchestrator.setBlockCompleted(); + await context.orchestrator.finaliseEpoch(); + expect(block.number).toEqual(context.blockNumber); }); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts index 9d03a31a25e..d7863abe163 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts @@ -47,6 +47,7 @@ describe('prover/orchestrator', () => { } }); + orchestrator.startNewEpoch(1, 1); await orchestrator.startNewBlock(2, makeGlobalVariables(1), [message]); await sleep(10); diff --git a/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts b/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts index 03f965a7b39..83a2b3daf10 100644 --- a/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts +++ b/yarn-project/prover-client/src/prover-agent/agent-queue-rpc-integration.test.ts @@ -43,7 +43,6 @@ describe('Prover agent <-> queue integration', () => { getBlockMergeRollupProof: makeBlockMergeRollupInputs, getEmptyBlockRootRollupProof: makeEmptyBlockRootRollupInputs, getBlockRootRollupProof: makeBlockRootRollupInputs, - getBlockRootRollupFinalProof: makeBlockRootRollupInputs, getEmptyPrivateKernelProof: () => new PrivateKernelEmptyInputData(makeHeader(), Fr.random(), Fr.random(), Fr.random()), getEmptyTubeProof: () => new 
PrivateKernelEmptyInputData(makeHeader(), Fr.random(), Fr.random(), Fr.random()), diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts index ba055086efe..c380a629213 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts @@ -360,14 +360,6 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource return this.enqueue({ type: ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); } - getBlockRootRollupFinalProof( - input: BlockRootRollupInputs, - signal?: AbortSignal, - epochNumber?: number, - ): Promise> { - return this.enqueue({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP_FINAL, inputs: input }, signal, epochNumber); - } - /** * Creates a proof for the given input. * @param input - Input to the circuit. diff --git a/yarn-project/prover-client/src/prover-agent/prover-agent.ts b/yarn-project/prover-client/src/prover-agent/prover-agent.ts index b9dce387e21..61b3172d548 100644 --- a/yarn-project/prover-client/src/prover-agent/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-agent/prover-agent.ts @@ -189,10 +189,6 @@ export class ProverAgent { return this.circuitProver.getBlockRootRollupProof(inputs); } - case ProvingRequestType.BLOCK_ROOT_ROLLUP_FINAL: { - return this.circuitProver.getBlockRootRollupFinalProof(inputs); - } - case ProvingRequestType.BLOCK_MERGE_ROLLUP: { return this.circuitProver.getBlockMergeRollupProof(inputs); } diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index a95ee46bc9c..ee3dc99956d 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -1,5 +1,5 @@ import { BBNativeRollupProver, type BBProverConfig } from '@aztec/bb-prover'; -import { PROVING_STATUS, mockTx } from '@aztec/circuit-types'; +import { mockTx } from '@aztec/circuit-types'; import { Fr, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; @@ -32,14 +32,14 @@ describe('prover/bb_prover/full-rollup', () => { it.each([ [1, 1, 0, 2], // Epoch with a single block, requires one padding block proof [2, 2, 0, 2], // Full epoch with two blocks - [2, 3, 0, 2], // Epoch with two blocks but the block merge tree was assembled as with 3 leaves, requires one padding block proof + // [2, 3, 0, 2], // Epoch with two blocks but the block merge tree was assembled as with 3 leaves, requires one padding block proof; commented out to reduce running time ])( 'proves a private-only epoch with %i/%i blocks with %i/%i non-empty txs each', async (blockCount, totalBlocks, nonEmptyTxs, totalTxs) => { log.info(`Proving epoch with ${blockCount}/${totalBlocks} blocks with ${nonEmptyTxs}/${totalTxs} non-empty txs`); const initialHeader = context.actualDb.getInitialHeader(); - const provingTicket = context.orchestrator.startNewEpoch(1, totalBlocks); + context.orchestrator.startNewEpoch(1, totalBlocks); for (let blockNum = 1; blockNum <= blockCount; blockNum++) { const globals = makeGlobals(blockNum); @@ -55,7 +55,7 @@ describe('prover/bb_prover/full-rollup', () => { log.info(`Starting new block #${blockNum}`); await context.orchestrator.startNewBlock(totalTxs, globals, 
l1ToL2Messages); log.info(`Processing public functions`); - const [processed, failed] = await context.processPublicFunctions(txs, nonEmptyTxs, context.blockProver); + const [processed, failed] = await context.processPublicFunctions(txs, nonEmptyTxs, context.epochProver); expect(processed.length).toBe(nonEmptyTxs); expect(failed.length).toBe(0); @@ -63,13 +63,8 @@ describe('prover/bb_prover/full-rollup', () => { await context.orchestrator.setBlockCompleted(); } - log.info(`Setting epoch as completed`); - context.orchestrator.setEpochCompleted(); - log.info(`Awaiting proofs`); - const provingResult = await provingTicket.provingPromise; - expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); - const epochResult = context.orchestrator.finaliseEpoch(); + const epochResult = await context.orchestrator.finaliseEpoch(); await expect(prover.verifyProof('RootRollupArtifact', epochResult.proof)).resolves.not.toThrow(); @@ -102,25 +97,18 @@ describe('prover/bb_prover/full-rollup', () => { Fr.random, ); - const provingTicket = await context.orchestrator.startNewBlock( - numTransactions, - context.globalVariables, - l1ToL2Messages, - ); + context.orchestrator.startNewEpoch(1, 1); + + await context.orchestrator.startNewBlock(numTransactions, context.globalVariables, l1ToL2Messages); - const [processed, failed] = await context.processPublicFunctions(txs, numTransactions, context.blockProver); + const [processed, failed] = await context.processPublicFunctions(txs, numTransactions, context.epochProver); expect(processed.length).toBe(numTransactions); expect(failed.length).toBe(0); await context.orchestrator.setBlockCompleted(); - const provingResult = await provingTicket.provingPromise; - - expect(provingResult.status).toBe(PROVING_STATUS.SUCCESS); - - const blockResult = await context.orchestrator.finaliseBlock(); - - await expect(prover.verifyProof('RootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); + const result = await context.orchestrator.finaliseEpoch(); + await expect(prover.verifyProof('RootRollupArtifact', result.proof)).resolves.not.toThrow(); }); }); diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index 49bf7abd5bd..0564d0bfd44 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -98,16 +98,6 @@ export class MockProver implements ServerCircuitProver { ); } - getBlockRootRollupFinalProof(): Promise> { - return Promise.resolve( - makePublicInputsAndRecursiveProof( - makeBlockRootOrBlockMergeRollupPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - ); - } - getEmptyPrivateKernelProof(): Promise> { return Promise.resolve( makePublicInputsAndRecursiveProof( diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 495759ed122..f06757dac0a 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -4,7 +4,6 @@ import { type L1ToL2MessageSource, type L2Block, type L2BlockSource, - PROVING_STATUS, type ProcessedTx, type ProverCoordination, type Tx, @@ -67,7 +66,7 @@ export class EpochProvingJob { const timer = new Timer(); try { - const provingTicket = this.prover.startNewEpoch(epochNumber, epochSize); + this.prover.startNewEpoch(epochNumber, epochSize); let previousHeader = (await this.l2BlockSource.getBlock(fromBlock - 1))?.header; for (let blockNumber = 
fromBlock; blockNumber <= toBlock; blockNumber++) { @@ -108,16 +107,8 @@ export class EpochProvingJob { previousHeader = block.header; } - // Pad epoch with empty block proofs if needed - this.prover.setEpochCompleted(); - this.state = 'awaiting-prover'; - const result = await provingTicket.provingPromise; - if (result.status === PROVING_STATUS.FAILURE) { - throw new Error(`Epoch proving failed: ${result.reason}`); - } - - const { publicInputs, proof } = this.prover.finaliseEpoch(); + const { publicInputs, proof } = await this.prover.finaliseEpoch(); this.log.info(`Finalised proof for epoch`, { epochNumber, fromBlock, toBlock, uuid: this.uuid }); this.state = 'publishing-proof'; diff --git a/yarn-project/sequencer-client/src/block_builder/index.ts b/yarn-project/sequencer-client/src/block_builder/index.ts index 077b1049650..60cc8b9cf9d 100644 --- a/yarn-project/sequencer-client/src/block_builder/index.ts +++ b/yarn-project/sequencer-client/src/block_builder/index.ts @@ -1,7 +1,7 @@ -import { type BlockSimulator, type MerkleTreeOperations } from '@aztec/circuit-types'; +import { type BlockBuilder, type MerkleTreeOperations } from '@aztec/circuit-types'; export * from './orchestrator.js'; export * from './light.js'; export interface BlockBuilderFactory { - create(db: MerkleTreeOperations): BlockSimulator; + create(db: MerkleTreeOperations): BlockBuilder; } diff --git a/yarn-project/sequencer-client/src/block_builder/light.test.ts b/yarn-project/sequencer-client/src/block_builder/light.test.ts index 25c041b4fa8..1fb44611b38 100644 --- a/yarn-project/sequencer-client/src/block_builder/light.test.ts +++ b/yarn-project/sequencer-client/src/block_builder/light.test.ts @@ -169,9 +169,8 @@ describe('LightBlockBuilder', () => { for (const tx of txs) { await builder.addNewTx(tx); } - await builder.setBlockCompleted(); - const result = await builder.finaliseBlock(); - return result.block.header; + const { header } = await builder.setBlockCompleted(); + return header; }; // Builds the block header using circuit outputs diff --git a/yarn-project/sequencer-client/src/block_builder/light.ts b/yarn-project/sequencer-client/src/block_builder/light.ts index 7443087bd6a..58c741faec0 100644 --- a/yarn-project/sequencer-client/src/block_builder/light.ts +++ b/yarn-project/sequencer-client/src/block_builder/light.ts @@ -1,14 +1,11 @@ import { createDebugLogger } from '@aztec/aztec.js'; import { - type BlockSimulator, + type BlockBuilder, Body, L2Block, MerkleTreeId, type MerkleTreeOperations, - PROVING_STATUS, type ProcessedTx, - type ProvingTicket, - type SimulationBlockResult, type TxEffect, makeEmptyProcessedTx, toTxEffect, @@ -28,12 +25,9 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; /** - * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. - * This class is temporary and should die once we switch from tx effects to tx objects submissions, since sequencers won't have - * the need to create L2 block headers to submit to L1. When we do that, we should also remove the references to the - * prover-client and bb-prover packages from this package. + * Builds a block and its header from a set of processed tx without running any circuits. 
*/ -export class LightweightBlockBuilder implements BlockSimulator { +export class LightweightBlockBuilder implements BlockBuilder { private numTxs?: number; private globalVariables?: GlobalVariables; private l1ToL2Messages?: Fr[]; @@ -44,7 +38,7 @@ export class LightweightBlockBuilder implements BlockSimulator { constructor(private db: MerkleTreeOperations, private telemetry: TelemetryClient) {} - async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<ProvingTicket> { + async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void> { this.logger.verbose('Starting new block', { numTxs, globalVariables, l1ToL2Messages }); this.numTxs = numTxs; this.globalVariables = globalVariables; @@ -52,9 +46,6 @@ export class LightweightBlockBuilder implements BlockSimulator { // Update L1 to L2 tree await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!); - - // Nothing to prove, so we return an already resolved promise - return { provingPromise: Promise.resolve({ status: PROVING_STATUS.SUCCESS }) }; } async addNewTx(tx: ProcessedTx): Promise<void> { @@ -69,9 +60,7 @@ export class LightweightBlockBuilder implements BlockSimulator { ); } - cancel(): void {} - - async setBlockCompleted(): Promise<void> { + async setBlockCompleted(): Promise<L2Block> { const paddingTxCount = this.numTxs! - this.txs.length; this.logger.verbose(`Setting block as completed and adding ${paddingTxCount} padding txs`); for (let i = 0; i < paddingTxCount; i++) { @@ -84,9 +73,11 @@ export class LightweightBlockBuilder implements BlockSimulator { ), ); } + + return this.buildBlock(); } - async finaliseBlock(): Promise<SimulationBlockResult> { + private async buildBlock(): Promise<L2Block> { this.logger.verbose(`Finalising block`); const nonEmptyTxEffects: TxEffect[] = this.txs .map(tx => toTxEffect(tx, this.globalVariables!.gasFees)) @@ -98,14 +89,14 @@ export class LightweightBlockBuilder implements BlockSimulator { const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); const block = new L2Block(newArchive, header, body); - return { block }; + return block; } } export class LightweightBlockBuilderFactory { constructor(private telemetry?: TelemetryClient) {} - create(db: MerkleTreeOperations): BlockSimulator { + create(db: MerkleTreeOperations): BlockBuilder { return new LightweightBlockBuilder(db, this.telemetry ?? new NoopTelemetryClient()); } } diff --git a/yarn-project/sequencer-client/src/block_builder/orchestrator.ts b/yarn-project/sequencer-client/src/block_builder/orchestrator.ts index 31f00f26ea5..b22f8922d85 100644 --- a/yarn-project/sequencer-client/src/block_builder/orchestrator.ts +++ b/yarn-project/sequencer-client/src/block_builder/orchestrator.ts @@ -1,11 +1,5 @@ import { TestCircuitProver } from '@aztec/bb-prover'; -import { - type BlockSimulator, - type MerkleTreeOperations, - type ProcessedTx, - type ProvingTicket, - type SimulationBlockResult, -} from '@aztec/circuit-types'; +import { type BlockBuilder, type L2Block, type MerkleTreeOperations, type ProcessedTx } from '@aztec/circuit-types'; import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; import { ProvingOrchestrator } from '@aztec/prover-client/orchestrator'; import { type SimulationProvider } from '@aztec/simulator'; @@ -14,27 +8,20 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; /** * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. 
- * This class is temporary and should die once we switch from tx effects to tx objects submissions, since sequencers won't have - * the need to create L2 block headers to submit to L1. When we do that, we should also remove the references to the - * prover-client and bb-prover packages from this package. + * This class is unused at the moment, but could be leveraged by a prover-node to ascertain that it can prove a block before + * committing to proving it by sending a quote. */ -export class OrchestratorBlockBuilder implements BlockSimulator { +export class OrchestratorBlockBuilder implements BlockBuilder { private orchestrator: ProvingOrchestrator; constructor(db: MerkleTreeOperations, simulationProvider: SimulationProvider, telemetry: TelemetryClient) { const testProver = new TestCircuitProver(telemetry, simulationProvider); this.orchestrator = new ProvingOrchestrator(db, testProver, telemetry); } - startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<ProvingTicket> { + startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void> { return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); } - cancel(): void { - this.orchestrator.cancel(); - } - finaliseBlock(): Promise<SimulationBlockResult> { - return this.orchestrator.finaliseBlock(); - } - setBlockCompleted(): Promise<void> { + setBlockCompleted(): Promise<L2Block> { return this.orchestrator.setBlockCompleted(); } addNewTx(tx: ProcessedTx): Promise<void> { @@ -45,7 +32,7 @@ export class OrchestratorBlockBuilder implements BlockSimulator { export class OrchestratorBlockBuilderFactory { constructor(private simulationProvider: SimulationProvider, private telemetry?: TelemetryClient) {} - create(db: MerkleTreeOperations): BlockSimulator { + create(db: MerkleTreeOperations): BlockBuilder { return new OrchestratorBlockBuilder(db, this.simulationProvider, this.telemetry ?? new NoopTelemetryClient()); } } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 02771aaf1d0..c074423b057 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -95,20 +95,6 @@ type L1ProcessArgs = { attestations?: Signature[]; }; -/** Arguments to the submitProof method of the rollup contract */ -type L1SubmitBlockProofArgs = { - /** The L2 block header. */ - header: Buffer; - /** A root of the archive tree after the L2 block is applied. */ - archive: Buffer; - /** Identifier of the prover. */ - proverId: Buffer; - /** The proof for the block. */ - proof: Buffer; - /** The aggregation object for the block's proof. 
*/ - aggregationObject: Buffer; -}; - /** Arguments to the submitEpochProof method of the rollup contract */ export type L1SubmitEpochProofArgs = { epochSize: number; @@ -406,58 +392,6 @@ export class L1Publisher { return false; } - public async submitBlockProof( - header: Header, - archiveRoot: Fr, - proverId: Fr, - aggregationObject: Fr[], - proof: Proof, - ): Promise { - const ctx = { blockNumber: header.globalVariables.blockNumber, slotNumber: header.globalVariables.slotNumber }; - - const txArgs: L1SubmitBlockProofArgs = { - header: header.toBuffer(), - archive: archiveRoot.toBuffer(), - proverId: proverId.toBuffer(), - aggregationObject: serializeToBuffer(aggregationObject), - proof: proof.withoutPublicInputs(), - }; - - // Process block - if (!this.interrupted) { - const timer = new Timer(); - const txHash = await this.sendSubmitBlockProofTx(txArgs); - if (!txHash) { - return false; - } - - const receipt = await this.getTransactionReceipt(txHash); - if (!receipt) { - return false; - } - - // Tx was mined successfully - if (receipt.status) { - const tx = await this.getTransactionStats(txHash); - const stats: L1PublishProofStats = { - ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), - ...pick(tx!, 'calldataGas', 'calldataSize'), - eventName: 'proof-published-to-l1', - }; - this.log.info(`Published proof to L1 rollup contract`, { ...stats, ...ctx }); - this.metrics.recordSubmitProof(timer.ms(), stats); - return true; - } - - this.metrics.recordFailedTx('submitProof'); - this.log.error(`Rollup.submitProof tx status failed: ${receipt.transactionHash}`, ctx); - await this.sleepOrInterrupted(); - } - - this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); - return false; - } - public async submitEpochProof(args: { epochNumber: number; fromBlock: number; @@ -578,33 +512,6 @@ export class L1Publisher { this.interrupted = false; } - private async sendSubmitBlockProofTx(submitProofArgs: L1SubmitBlockProofArgs): Promise { - try { - const size = Object.values(submitProofArgs).reduce((acc, arg) => acc + arg.length, 0); - this.log.info(`SubmitProof size=${size} bytes`); - - const { header, archive, proverId, aggregationObject, proof } = submitProofArgs; - const args = [ - `0x${header.toString('hex')}`, - `0x${archive.toString('hex')}`, - `0x${proverId.toString('hex')}`, - `0x${aggregationObject.toString('hex')}`, - `0x${proof.toString('hex')}`, - ] as const; - - await this.rollupContract.simulate.submitBlockRootProof(args, { - account: this.account, - }); - - return await this.rollupContract.write.submitBlockRootProof(args, { - account: this.account, - }); - } catch (err) { - this.log.error(`Rollup submit proof failed`, err); - return undefined; - } - } - private async sendSubmitEpochProofTx(args: { fromBlock: number; toBlock: number; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index bf3e21f0e6a..cb648f3db61 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -1,7 +1,7 @@ import { BlockAttestation, + type BlockBuilder, BlockProposal, - type BlockSimulator, ConsensusPayload, type EpochProofQuote, type L1ToL2MessageSource, @@ -9,9 +9,6 @@ import { type L2BlockSource, type MerkleTreeAdminOperations, MerkleTreeId, - PROVING_STATUS, - type ProvingSuccess, - type ProvingTicket, type Tx, TxHash, type UnencryptedL2Log, @@ -56,7 +53,7 @@ describe('sequencer', () => { let 
globalVariableBuilder: MockProxy; let p2p: MockProxy; let worldState: MockProxy; - let blockSimulator: MockProxy; + let blockBuilder: MockProxy; let merkleTreeOps: MockProxy; let publicProcessor: MockProxy; let l2BlockSource: MockProxy; @@ -119,7 +116,7 @@ describe('sequencer', () => { globalVariableBuilder = mock(); merkleTreeOps = mock(); - blockSimulator = mock(); + blockBuilder = mock(); p2p = mock({ getStatus: mockFn().mockResolvedValue({ state: P2PClientState.IDLE, syncedToL2Block: lastBlockNumber }), @@ -158,7 +155,7 @@ describe('sequencer', () => { }); const blockBuilderFactory = mock({ - create: () => blockSimulator, + create: () => blockBuilder, }); validatorClient = mock({ @@ -187,16 +184,8 @@ describe('sequencer', () => { tx.data.constants.txContext.chainId = chainId; const txHash = tx.getTxHash(); - const result: ProvingSuccess = { - status: PROVING_STATUS.SUCCESS, - }; - const ticket: ProvingTicket = { - provingPromise: Promise.resolve(result), - }; - p2p.getTxs.mockReturnValueOnce([tx]); - blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); - blockSimulator.finaliseBlock.mockResolvedValue({ block }); + blockBuilder.setBlockCompleted.mockResolvedValue(block); publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); @@ -204,7 +193,7 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(blockSimulator.startNewBlock).toHaveBeenCalledWith( + expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( 2, mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), @@ -212,23 +201,15 @@ describe('sequencer', () => { // Ok, we have an issue that we never actually call the process L2 block expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); - expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); it('builds a block when it is their turn', async () => { const tx = mockTxForRollup(); tx.data.constants.txContext.chainId = chainId; const txHash = tx.getTxHash(); - const result: ProvingSuccess = { - status: PROVING_STATUS.SUCCESS, - }; - const ticket: ProvingTicket = { - provingPromise: Promise.resolve(result), - }; p2p.getTxs.mockReturnValue([tx]); - blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); - blockSimulator.finaliseBlock.mockResolvedValue({ block }); + blockBuilder.setBlockCompleted.mockResolvedValue(block); publisher.proposeL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); @@ -239,7 +220,7 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(blockSimulator.startNewBlock).not.toHaveBeenCalled(); + expect(blockBuilder.startNewBlock).not.toHaveBeenCalled(); // Now we can propose, but lets assume that the content is still "bad" (missing sigs etc) publisher.canProposeAtNextEthBlock.mockResolvedValue([ @@ -248,20 +229,19 @@ describe('sequencer', () => { ]); await sequencer.work(); - expect(blockSimulator.startNewBlock).not.toHaveBeenCalled(); + expect(blockBuilder.startNewBlock).not.toHaveBeenCalled(); // Now it is! 
     publisher.validateBlockForSubmission.mockClear();
     publisher.validateBlockForSubmission.mockResolvedValue();
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       2,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block out of several txs rejecting double spends', async () => {
@@ -273,16 +253,9 @@ describe('sequencer', () => {
     const validTxHashes = txs.filter((_, i) => i !== doubleSpendTxIndex).map(tx => tx.getTxHash());
     const doubleSpendTx = txs[doubleSpendTxIndex];
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
     p2p.getTxs.mockReturnValueOnce(txs);
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -298,14 +271,13 @@ describe('sequencer', () => {
     await sequencer.initialSync();
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       2,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined);
     expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block out of several txs rejecting incorrect chain ids', async () => {
@@ -317,16 +289,8 @@ describe('sequencer', () => {
     const invalidChainTx = txs[invalidChainTxIndex];
     const validTxHashes = txs.filter((_, i) => i !== invalidChainTxIndex).map(tx => tx.getTxHash());
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
-
     p2p.getTxs.mockReturnValueOnce(txs);
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -337,14 +301,13 @@ describe('sequencer', () => {
     await sequencer.initialSync();
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       2,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined);
     expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block out of several txs dropping the ones that go over max size', async () => {
@@ -356,16 +319,8 @@ describe('sequencer', () => {
     });
     const validTxHashes = txs.filter((_, i) => i !== invalidTransactionIndex).map(tx => tx.getTxHash());
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
-
     p2p.getTxs.mockReturnValueOnce(txs);
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -378,13 +333,12 @@ describe('sequencer', () => {
     await sequencer.initialSync();
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
      2,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block once it reaches the minimum number of transactions', async () => {
@@ -394,15 +348,8 @@ describe('sequencer', () => {
       return tx;
     });
     const block = L2Block.random(lastBlockNumber + 1);
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -415,27 +362,26 @@ describe('sequencer', () => {
     p2p.getTxs.mockReturnValueOnce([]);
     //p2p.getTxs.mockReturnValueOnce(txs.slice(0, 4));
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // block is not built with 3 txs
     p2p.getTxs.mockReturnValueOnce(txs.slice(0, 3));
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // block is built with 4 txs
     p2p.getTxs.mockReturnValueOnce(txs.slice(0, 4));
     const txHashes = txs.slice(0, 4).map(tx => tx.getTxHash());
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       4,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1);
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, undefined);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block that contains zero real transactions once flushed', async () => {
@@ -445,15 +391,8 @@ describe('sequencer', () => {
       return tx;
     });
     const block = L2Block.random(lastBlockNumber + 1);
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -465,12 +404,12 @@ describe('sequencer', () => {
     // block is not built with 0 txs
     p2p.getTxs.mockReturnValueOnce([]);
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // block is not built with 3 txs
     p2p.getTxs.mockReturnValueOnce(txs.slice(0, 3));
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // flush the sequencer and it should build a block
     sequencer.flush();
@@ -478,15 +417,14 @@ describe('sequencer', () => {
     // block is built with 0 txs
     p2p.getTxs.mockReturnValueOnce([]);
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(1);
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       2,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
     );
     expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1);
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [], undefined);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('builds a block that contains less than the minimum number of transactions once flushed', async () => {
@@ -496,15 +434,8 @@ describe('sequencer', () => {
       return tx;
     });
     const block = L2Block.random(lastBlockNumber + 1);
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
    publisher.proposeL2Block.mockResolvedValueOnce(true);
     globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables);
@@ -516,12 +447,12 @@ describe('sequencer', () => {
     // block is not built with 0 txs
     p2p.getTxs.mockReturnValueOnce([]);
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // block is not built with 3 txs
     p2p.getTxs.mockReturnValueOnce(txs.slice(0, 3));
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(0);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0);
     // flush the sequencer and it should build a block
     sequencer.flush();
@@ -531,8 +462,8 @@ describe('sequencer', () => {
     p2p.getTxs.mockReturnValueOnce(postFlushTxs);
     const postFlushTxHashes = postFlushTxs.map(tx => tx.getTxHash());
     await sequencer.work();
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledTimes(1);
-    expect(blockSimulator.startNewBlock).toHaveBeenCalledWith(
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1);
+    expect(blockBuilder.startNewBlock).toHaveBeenCalledWith(
       3,
       mockedGlobalVariables,
       Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)),
@@ -540,22 +471,14 @@ describe('sequencer', () => {
     expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1);
     expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), postFlushTxHashes, undefined);
-    expect(blockSimulator.cancel).toHaveBeenCalledTimes(0);
   });
   it('aborts building a block if the chain moves underneath it', async () => {
     const tx = mockTxForRollup();
     tx.data.constants.txContext.chainId = chainId;
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
     p2p.getTxs.mockReturnValueOnce([tx]);
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
     publisher.proposeL2Block.mockResolvedValueOnce(true);
     const mockedGlobalVariables = new GlobalVariables(
@@ -631,16 +554,9 @@ describe('sequencer', () => {
     const tx = mockTxForRollup();
     tx.data.constants.txContext.chainId = chainId;
     txHash = tx.getTxHash();
-    const result: ProvingSuccess = {
-      status: PROVING_STATUS.SUCCESS,
-    };
-    const ticket: ProvingTicket = {
-      provingPromise: Promise.resolve(result),
-    };
     p2p.getTxs.mockReturnValue([tx]);
-    blockSimulator.startNewBlock.mockResolvedValueOnce(ticket);
-    blockSimulator.finaliseBlock.mockResolvedValue({ block });
+    blockBuilder.setBlockCompleted.mockResolvedValue(block);
   };
   it('submits a valid proof quote with a block', async () => {
diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
index 5e7bd9a4ae3..0195d1afc3a 100644
--- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts
+++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
@@ -11,7 +11,7 @@ import {
   type WorldStateStatus,
   type WorldStateSynchronizer,
 } from '@aztec/circuit-types';
-import { type AllowedElement, BlockProofError, PROVING_STATUS } from '@aztec/circuit-types/interfaces';
+import { type AllowedElement, BlockProofError } from '@aztec/circuit-types/interfaces';
 import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats';
 import {
   AppendOnlyTreeSnapshot,
@@ -433,7 +433,7 @@ export class Sequencer {
     const blockBuildingTimer = new Timer();
     const blockBuilder = this.blockBuilderFactory.create(this.worldState.getLatest());
-    const blockTicket = await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages);
+    await blockBuilder.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages);
     const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() =>
       processor.process(validTxs, blockSize, blockBuilder, this.txValidatorFactory.validatorForProcessedTxs()),
@@ -452,23 +452,15 @@ export class Sequencer {
         processedTxsCount: processedTxs.length,
       })
     ) {
-      blockBuilder.cancel();
+      // TODO: Roll back changes to world state
      throw new Error('Should not propose the block');
     }
     // All real transactions have been added, set the block as full and complete the proving.
-    await blockBuilder.setBlockCompleted();
+    const block = await blockBuilder.setBlockCompleted();
-    // Here we are now waiting for the block to be proven (using simulated[fake] proofs).
     // TODO(@PhilWindle) We should probably periodically check for things like another
     // block being published before ours instead of just waiting on our block
-    const result = await blockTicket.provingPromise;
-    if (result.status === PROVING_STATUS.FAILURE) {
-      throw new Error(`Block proving failed, reason: ${result.reason}`);
-    }
-
-    // Block is ready, now finalise
-    const { block } = await blockBuilder.finaliseBlock();
     await this.publisher.validateBlockForSubmission(block.header);
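The sequencer.ts hunk above replaces the old two-step flow, in which startNewBlock returned a ProvingTicket whose provingPromise was awaited before finaliseBlock produced the block, with a builder whose setBlockCompleted resolves directly to the assembled block. The sketch below is only an illustration of that control flow under simplified assumptions: BlockBuilderLike, L2BlockLike and buildBlock are made-up names for this note, not the interfaces actually exported by @aztec/circuit-types.

// Illustrative sketch only: simplified stand-in types, not the real Aztec interfaces.
interface L2BlockLike {
  header: unknown;
}

interface BlockBuilderLike {
  startNewBlock(numTxs: number, globalVariables: unknown, l1ToL2Messages: unknown[]): Promise<void>;
  addNewTx(tx: unknown): Promise<void>;
  setBlockCompleted(): Promise<L2BlockLike>; // now resolves to the block itself
}

// Old flow (removed above): await ticket.provingPromise, then finaliseBlock() for the block.
// New flow: no ticket and no proving wait; setBlockCompleted() yields the block to propose.
async function buildBlock(
  builder: BlockBuilderLike,
  txs: unknown[],
  globalVariables: unknown,
  l1ToL2Messages: unknown[],
): Promise<L2BlockLike> {
  await builder.startNewBlock(txs.length, globalVariables, l1ToL2Messages);
  for (const tx of txs) {
    await builder.addNewTx(tx); // in the real code this happens inside PublicProcessor.process
  }
  return builder.setBlockCompleted();
}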
diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts
index f251a142dac..8ebde5516a7 100644
--- a/yarn-project/simulator/src/public/public_processor.test.ts
+++ b/yarn-project/simulator/src/public/public_processor.test.ts
@@ -1,6 +1,6 @@
 import {
-  type BlockProver,
   type ProcessedTx,
+  type ProcessedTxHandler,
   PublicDataWrite,
   PublicKernelPhase,
   SimulationError,
@@ -59,7 +59,7 @@ describe('public_processor', () => {
   let db: MockProxy;
   let publicExecutor: MockProxy;
   let worldStateDB: MockProxy;
-  let prover: MockProxy;
+  let handler: MockProxy;
   let proof: ClientIvcProof;
   let root: Buffer;
@@ -70,7 +70,7 @@ describe('public_processor', () => {
     db = mock();
     publicExecutor = mock();
     worldStateDB = mock();
-    prover = mock();
+    handler = mock();
     proof = ClientIvcProof.empty();
     root = Buffer.alloc(32, 5);
@@ -99,7 +99,7 @@ describe('public_processor', () => {
      const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 0 });
      const hash = tx.getTxHash();
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed.length).toBe(1);
@@ -123,21 +123,21 @@ describe('public_processor', () => {
      expect(processed[0]).toEqual(expected);
      expect(failed).toEqual([]);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('returns failed txs without aborting entire operation', async function () {
      publicExecutor.simulate.mockRejectedValue(new SimulationError(`Failed`, []));
      const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 0, numberOfRevertiblePublicCallRequests: 1 });
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toEqual([]);
      expect(failed[0].tx).toEqual(tx);
      expect(failed[0].error).toEqual(new SimulationError(`Failed`, []));
      expect(worldStateDB.commit).toHaveBeenCalledTimes(0);
      expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1);
-      expect(prover.addNewTx).toHaveBeenCalledTimes(0);
+      expect(handler.addNewTx).toHaveBeenCalledTimes(0);
    });
  });
@@ -200,7 +200,7 @@ describe('public_processor', () => {
        hasLogs: true,
      });
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(failed.map(f => f.error)).toEqual([]);
      expect(processed).toHaveLength(1);
@@ -214,7 +214,7 @@ describe('public_processor', () => {
      expect(processed[0].encryptedLogs.getTotalLogCount()).toBe(6);
      expect(processed[0].unencryptedLogs.getTotalLogCount()).toBe(2);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('runs a tx with an enqueued public call with nested execution', async function () {
@@ -233,7 +233,7 @@
      publicExecutor.simulate.mockResolvedValue(publicExecutionResult);
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -246,7 +246,7 @@ describe('public_processor', () => {
      expect(worldStateDB.commit).toHaveBeenCalledTimes(1);
      expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('does not attempt to overfill a block', async function () {
@@ -255,7 +255,7 @@ describe('public_processor', () => {
      );
      // We are passing 3 txs but only 2 can fit in the block
-      const [processed, failed] = await processor.process(txs, 2, prover);
+      const [processed, failed] = await processor.process(txs, 2, handler);
      expect(processed).toHaveLength(2);
      expect(processed[0].hash).toEqual(txs[0].getTxHash());
@@ -267,8 +267,8 @@ describe('public_processor', () => {
      expect(worldStateDB.commit).toHaveBeenCalledTimes(2);
      expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[1]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[1]);
    });
    it('does not send a transaction to the prover if validation fails', async function () {
@@ -277,13 +277,13 @@ describe('public_processor', () => {
      const txValidator: MockProxy> = mock();
      txValidator.validateTxs.mockRejectedValue([[], [tx]]);
-      const [processed, failed] = await processor.process([tx], 1, prover, txValidator);
+      const [processed, failed] = await processor.process([tx], 1, handler, txValidator);
      expect(processed).toHaveLength(0);
      expect(failed).toHaveLength(1);
      expect(publicExecutor.simulate).toHaveBeenCalledTimes(1);
-      expect(prover.addNewTx).toHaveBeenCalledTimes(0);
+      expect(handler.addNewTx).toHaveBeenCalledTimes(0);
    });
    it('rolls back app logic db updates on failed public execution, but persists setup', async function () {
@@ -367,7 +367,7 @@ describe('public_processor', () => {
      const innerSpy = jest.spyOn(publicKernel, 'publicKernelCircuitInner');
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -401,7 +401,7 @@ describe('public_processor', () => {
      expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(3);
      expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(1);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('fails a transaction that reverts in setup', async function () {
@@ -472,7 +472,7 @@ describe('public_processor', () => {
      const innerSpy = jest.spyOn(publicKernel, 'publicKernelCircuitInner');
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(0);
      expect(failed).toHaveLength(1);
@@ -487,7 +487,7 @@ describe('public_processor', () => {
      expect(worldStateDB.commit).toHaveBeenCalledTimes(0);
      expect(worldStateDB.rollbackToCommit).toHaveBeenCalledTimes(1);
-      expect(prover.addNewTx).toHaveBeenCalledTimes(0);
+      expect(handler.addNewTx).toHaveBeenCalledTimes(0);
    });
    it('includes a transaction that reverts in teardown', async function () {
@@ -567,7 +567,7 @@ describe('public_processor', () => {
      const innerSpy = jest.spyOn(publicKernel, 'publicKernelCircuitInner');
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -600,7 +600,7 @@ describe('public_processor', () => {
      expect(processed[0].data.revertCode).toEqual(RevertCode.TEARDOWN_REVERTED);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('includes a transaction that reverts in app logic and teardown', async function () {
@@ -681,7 +681,7 @@ describe('public_processor', () => {
      const innerSpy = jest.spyOn(publicKernel, 'publicKernelCircuitInner');
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -714,7 +714,7 @@ describe('public_processor', () => {
      expect(processed[0].data.revertCode).toEqual(RevertCode.BOTH_REVERTED);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('runs a tx with all phases', async function () {
@@ -833,7 +833,7 @@ describe('public_processor', () => {
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
      const tailSpy = jest.spyOn(publicKernel, 'publicKernelCircuitTail');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -880,7 +880,7 @@ describe('public_processor', () => {
      expect(txEffect.encryptedLogs.getTotalLogCount()).toBe(0);
      expect(txEffect.unencryptedLogs.getTotalLogCount()).toBe(0);
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('runs a tx with only teardown', async function () {
@@ -942,7 +942,7 @@ describe('public_processor', () => {
      const mergeSpy = jest.spyOn(publicKernel, 'publicKernelCircuitMerge');
      const tailSpy = jest.spyOn(publicKernel, 'publicKernelCircuitTail');
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(1);
      expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -975,7 +975,7 @@ describe('public_processor', () => {
        Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()),
      );
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(failed.map(f => f.error)).toEqual([]);
      expect(processed).toHaveLength(1);
@@ -992,7 +992,7 @@ describe('public_processor', () => {
        }),
      );
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('injects balance update with public enqueued call', async function () {
@@ -1015,7 +1015,7 @@ describe('public_processor', () => {
        Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()),
      );
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(failed.map(f => f.error)).toEqual([]);
      expect(processed).toHaveLength(1);
@@ -1034,7 +1034,7 @@ describe('public_processor', () => {
        }),
      );
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('tweaks existing balance update from claim', async function () {
@@ -1063,7 +1063,7 @@ describe('public_processor', () => {
        sideEffectCounter: 0,
      });
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(failed.map(f => f.error)).toEqual([]);
      expect(processed).toHaveLength(1);
@@ -1082,7 +1082,7 @@ describe('public_processor', () => {
        }),
      );
-      expect(prover.addNewTx).toHaveBeenCalledWith(processed[0]);
+      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);
    });
    it('rejects tx if fee payer has not enough balance', async function () {
@@ -1105,7 +1105,7 @@ describe('public_processor', () => {
        Promise.resolve(computePublicDataTreeLeafSlot(address, slot).toBigInt()),
      );
-      const [processed, failed] = await processor.process([tx], 1, prover);
+      const [processed, failed] = await processor.process([tx], 1, handler);
      expect(processed).toHaveLength(0);
      expect(failed).toHaveLength(1);
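Across public_processor.test.ts the mocked collaborator changes from a BlockProver to a ProcessedTxHandler: the processor no longer assumes its consumer proves anything, only that it accepts processed transactions via addNewTx. The sketch below shows how such a handler can be mocked with jest-mock-extended; ProcessedTxHandlerLike is an assumed, simplified shape used purely for illustration and not the actual interface from @aztec/circuit-types.

// Illustrative sketch only: ProcessedTxHandlerLike is a simplified stand-in for the
// handler interface the tests above mock.
import { mock } from 'jest-mock-extended';

interface ProcessedTxHandlerLike {
  addNewTx(tx: unknown): Promise<void>;
}

// Any consumer of processed txs (a prover, a block builder, ...) satisfies this shape,
// so the tests only assert that addNewTx was called with the processed transactions.
const handler = mock<ProcessedTxHandlerLike>();
handler.addNewTx.mockResolvedValue(undefined);
// e.g. const [processed, failed] = await processor.process(txs, maxTxs, handler);
//      expect(handler.addNewTx).toHaveBeenCalledWith(processed[0]);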