From 3e247ebd8aee476f77d3ddd5aa7149c326a68f8c Mon Sep 17 00:00:00 2001 From: agolajko <57454127+agolajko@users.noreply.github.com> Date: Fri, 15 Sep 2023 10:43:45 +0100 Subject: [PATCH 01/11] Fix: replaced hardhat run with ts-node (#18) Co-authored-by: agolajko Co-authored-by: Ramon Canales --- zksync/package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/zksync/package.json b/zksync/package.json index 4cb76b95b7..8cebae64b7 100644 --- a/zksync/package.json +++ b/zksync/package.json @@ -34,13 +34,13 @@ "build": "hardhat compile", "clean": "hardhat clean", "verify": "hardhat run src/verify.ts", - "deploy-testnet-paymaster": "hardhat run src/deployTestnetPaymaster.ts", - "deploy-force-deploy-upgrader": "hardhat run src/deployForceDeployUpgrader.ts", - "publish-bridge-preimages": "hardhat run src/publish-bridge-preimages.ts", + "deploy-testnet-paymaster": "ts-node src/deployTestnetPaymaster.ts", + "deploy-force-deploy-upgrader": "ts-node src/deployForceDeployUpgrader.ts", + "publish-bridge-preimages": "ts-node src/publish-bridge-preimages.ts", "deploy-l2-weth": "ts-node src/deployL2Weth.ts", "upgrade-l2-erc20-contract": "ts-node src/upgradeL2BridgeImpl.ts" }, "dependencies": { "dotenv": "^16.0.3" } -} +} \ No newline at end of file From 675be2e4b879aab5c12e7073972688e4237e5b7e Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Fri, 15 Sep 2023 14:32:53 +0200 Subject: [PATCH 02/11] Automatically runs license scan in all subdirectories with yarn.lock. 
(#14) --- .github/workflows/license.yaml | 54 ----------------------- .github/workflows/nodejs-license.yaml | 63 +++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 54 deletions(-) delete mode 100644 .github/workflows/license.yaml create mode 100644 .github/workflows/nodejs-license.yaml diff --git a/.github/workflows/license.yaml b/.github/workflows/license.yaml deleted file mode 100644 index afc7d22000..0000000000 --- a/.github/workflows/license.yaml +++ /dev/null @@ -1,54 +0,0 @@ -name: CI - -on: - pull_request - -env: - ALLOWED_LICENSES: > - MIT; - BSD; - ISC; - Apache-2.0; - MPL-2.0; - LGPL-3.0; - LGPL-3.0-or-later; - CC0-1.0; - CC-BY-3.0; - CC-BY-4.0; - Python-2.0; - PSF; - Public Domain; - WTFPL; - Unlicense; - # It has to be one line, there must be no space between packages. - EXCLUDE_PACKAGES: testrpc@0.0.1;uuid@2.0.1; - -jobs: - license-check: - runs-on: ubuntu-latest - steps: - - name: Checkout latest code - uses: actions/checkout@v3 - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: '16.15.1' - - - name: Install yarn - run: npm install -g yarn license-checker - - - name: Install dependencies in ethereum - run: cd ethereum && yarn install - - - name: Check licenses in ethereum - working-directory: ethereum - run: npx license-checker --json --onlyAllow="$ALLOWED_LICENSES" --excludePackages "$EXCLUDE_PACKAGES" - - - name: Install dependencies in zksync - run: cd zksync && yarn install - - - name: Check licenses in zksync - working-directory: zksync - run: npx license-checker --json --onlyAllow="$ALLOWED_LICENSES" --excludePackages "$EXCLUDE_PACKAGES" - diff --git a/.github/workflows/nodejs-license.yaml b/.github/workflows/nodejs-license.yaml new file mode 100644 index 0000000000..5d4041998e --- /dev/null +++ b/.github/workflows/nodejs-license.yaml @@ -0,0 +1,63 @@ +name: CI + +on: + - pull_request + +env: + ALLOWED_LICENSES: > + MIT; + BSD; + ISC; + Apache-2.0; + MPL-2.0; + LGPL-3.0; + LGPL-3.0-or-later; + CC0-1.0; + 
CC-BY-3.0; + CC-BY-4.0; + Python-2.0; + PSF; + Public Domain; + WTFPL; + Unlicense; + # It has to be one line, there must be no space between packages. + EXCLUDE_PACKAGES: testrpc@0.0.1;uuid@2.0.1; + +jobs: + generate-matrix: + name: Lists modules + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v3 + - run: | + DIRS=$(find -not \( -path \*node_modules -prune \) -type f -name yarn.lock | xargs dirname | awk -v RS='' -v OFS='","' 'NF { $1 = $1; print "\"" $0 "\"" }') + echo "matrix=[${DIRS}]" >> $GITHUB_OUTPUT + id: set-matrix + + license-check: + needs: [generate-matrix] + runs-on: ubuntu-latest + strategy: + matrix: + dir: ${{ fromJson(needs.generate-matrix.outputs.matrix) }} + steps: + - name: Checkout latest code + uses: actions/checkout@v3 + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: '16.15.1' + + - name: Install yarn + run: npm install -g yarn license-checker + + - name: Install dependencies in ${{ matrix.dir }} + working-directory: ${{ matrix.dir }} + run: yarn install + + - name: Check licenses in ${{ matrix.dir }} + working-directory: ${{ matrix.dir }} + run: npx license-checker --json --onlyAllow="$ALLOWED_LICENSES" --excludePackages "$EXCLUDE_PACKAGES" From b14b9a6e5d5d4db52abd97721aeb2722167d1144 Mon Sep 17 00:00:00 2001 From: Shahar Kaminsky Date: Sat, 16 Sep 2023 07:27:37 +0300 Subject: [PATCH 03/11] Add FOS Templates (#20) --- .github/ISSUE_TEMPLATE/bug_report.md | 39 ++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 21 +++++++ .github/SECURITY.md | 74 +++++++++++++++++++++++ .github/pull_request_template.md | 20 ++++++ CONTRIBUTING.md | 45 ++++++++++++-- 5 files changed, 194 insertions(+), 5 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/SECURITY.md create mode 100644 .github/pull_request_template.md diff --git 
a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..163e439d57 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,39 @@ +--- +name: Scripts-Related Bug Report +about: Use this template for reporting script-related bugs. For contract-related bugs, see our security policy. +title: '' +labels: bug +assignees: '' +--- + +### 🐛 Script Bug Report + +#### 📝 Description + +Provide a clear and concise description of the bug. + +#### 🔄 Reproduction Steps + +Steps to reproduce the behaviour + +#### 🤔 Expected Behavior + +Describe what you expected to happen. + +#### 😯 Current Behavior + +Describe what actually happened. + +#### 🖥️ Environment + +Any relevant environment details. + +#### 📋 Additional Context + +Add any other context about the problem here. If applicable, add screenshots to help explain. + +#### 📎 Log Output + +``` +Paste any relevant log output here. +``` diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..d921e066cc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,21 @@ +--- +name: Feature request +about: Use this template for requesting features +title: '' +labels: feat +assignees: '' +--- + +### 🌟 Feature Request + +#### 📝 Description + +Provide a clear and concise description of the feature you'd like to see. + +#### 🤔 Rationale + +Explain why this feature is important and how it benefits the project. + +#### 📋 Additional Context + +Add any other context or information about the feature request here. diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 0000000000..2f2871cea1 --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,74 @@ +# Security Policy + +We truly appreciate efforts to discover and disclose security issues responsibly! 
+ +## Vulnerabilities + +If you'd like to report a security issue in the repositories of matter-labs organization, please proceed to our +[Bug Bounty Program on Immunefi](https://era.zksync.io/docs/reference/troubleshooting/audit-bug-bounty.html#bug-bounty-program). + +## Other Security Issues + +We take an impact-first approach instead of a rules-first approach. Therefore, if you believe you found the impactful +issue but can't report it via the Bug Bounty, please email us at +[security@matterlabs.dev](mailto:security@matterlabs.dev). + +### PGP Key + +The following PGP key may be used to communicate sensitive information to developers: + +Fingerprint: `5FED B2D0 EA2C 4906 DD66 71D7 A2C5 0B40 CE3C F297` + +``` +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGEBmQkBEAD6tlkBEZFMvR8kOgxXX857nC2+oTik6TopJz4uCskuqDaeldMy +l+26BBzLkIeO1loS+bzVgnNFJRrGt9gv98MzNEHJVv6D7GsSLlUX/pz7Lxn0J4ry +o5XIk3MQTCUBdaXGs6GBLl5Xe8o+zNj4MKd4zjgDLinITNlE/YZCDsXyvYS3YFTQ +cwaUTNlawkKgw4BLaEqwB2JuyEhI9wx5X7ibjFL32sWMolYsNAlzFQzM09HCurTn +q0DYau9kPJARcEk9/DK2iq0z3gMCQ8iRTDaOWd8IbSP3HxcEoM5j5ZVAlULmjmUE +StDaMPLj0Kh01Tesh/j+vjchPXHT0n4zqi1+KOesAOk7SIwLadHfQMTpkU7G2fR1 +BrA5MtlzY+4Rm6o7qu3dpZ+Nc4iM3FUnaQRpvn4g5nTh8vjG94OCzX8DXWrCKyxx +amCs9PLDYOpx84fXYv4frkWpKh2digDSUGKhoHaOSnqyyvu3BNWXBCQZJ20rqEIu +sXOQMxWIoWCOOPRRvrHrKDA2hpoKjs3pGsProfpVRzb9702jhWpTfbDp9WjQlFtX +2ZIDxlwAxcugClgrp5JiUxvhg2A9lDNwCF7r1e68uNv5usBZQVKPJmnvS2nWgKy8 +x9oJsnwrEjxwiRHd34UvfMkwY9RENSJ+NoXqBdS7Lwz4m6vgbzq6K56WPQARAQAB +tCRaa1N5bmMgU2VjdXJpdHkgPHNlY3VyaXR5QHprc3luYy5pbz6JAk4EEwEKADgW +IQRf7bLQ6ixJBt1mcdeixQtAzjzylwUCYQGZCQIbAwULCQgHAgYVCgkICwIEFgID +AQIeAQIXgAAKCRCixQtAzjzyl5y8EAC/T3oq88Dak2b+5TlWdU2Gpm6924eAqlMt +y1KksDezzNQUlPiCUVllpin2PIjU/S+yzMWKXJA04LoVkEPfPOWjAaavLOjRumxu +MR6P2dVUg1InqzYVsJuRhKSpeexzNA5qO2BPM7/I2Iea1IoJPjogGbfXCo0r5kne +KU7a5GEa9eDHxpHTsbphQe2vpQ1239mUJrFpzAvILn6jV1tawMn5pNCXbsa8l6l2 +gtlyQPdOQECy77ZJxrgzaUBcs/RPzUGhwA/qNuvpF0whaCvZuUFMVuCTEu5LZka2 
+I9Rixy+3jqBeONBgb+Fiz5phbiMX33M9JQwGONFaxdvpFTerLwPK2N1T8zcufa01 +ypzkWGheScFZemBxUwXwK4x579wjsnfrY11w0p1jtDgPTnLlXUA2mom4+7MyXPg0 +F75qh6vU1pdXaCVkruFgPVtIw+ccw2AxD50iZQ943ZERom9k165dR9+QxOVMXQ4P +VUxsFZWvK70/s8TLjsGljvSdSOa85iEUqSqh0AlCwIAxLMiDwh5s/ZgiHoIM6Xih +oCpuZyK9p0dn+DF/XkgAZ/S91PesMye3cGm6M5r0tS26aoc2Pk6X37Hha1pRALwo +MOHyaGjc/jjcXXxv6o55ALrOrzS0LQmLZ+EHuteCT15kmeY3kqYJ3og62KgiDvew +dKHENvg7d7kCDQRhAZleARAA6uD6WfdqGeKV5i170+kLsxR3QGav0qGNAbxpSJyn +iHQ8u7mQk3S+ziwN2AAopfBk1je+vCWtEGC3+DWRRfJSjLbtaBG8e6kLP3/cGA75 +qURz6glTG4nl5fcEAa6B1st0OxjVWiSLX3g/yjz8lznQb9awuRjdeHMnyx5DsJUN +d+Iu5KxGupQvKGOMKivSvC8VWk9taaQRpRF+++6stLCDk3ZtlxiopMs3X2jAp6xG +sOBbix1cv9BTsfaiL7XDL/gviqBPXYY5L42x6+jnPo5lROfnlLYkWrv6KZr7HD4k +tRXeaSwxLD2EkUyb16Jpp0be/ofvBtITGUDDLCGBiaXtx/v8d52MARjsyLJSYloj +1yiW01LfAiWHUC4z5jl2T7E7sicrlLH1M8Z6WbuqjdeaYwtfyPA2YCKr/3fn6pIo +D+pYaBSESmhA92P+XVaf5y2BZ6Qf8LveDpWwsVGdBGh9T0raA1ooe1GESLjmIjUa +z5AeQ/uXL5Md9I6bpMUUJYQiH19RPcFlJriI3phXyyf6Wlkk8oVEeCWyzcmw+x1V +deRTvE2x4WIwKGLXRNjin2j1AP7vU2HaNwlPrLijqdyi68+0irRQONoH7Qonr4ca +xWgL+pAaa3dWxf0xqK7uZFp4aTVWlr2uXtV/eaUtLmGMCU0jnjb109wg5L0F7WRT +PfEAEQEAAYkCNgQYAQoAIBYhBF/tstDqLEkG3WZx16LFC0DOPPKXBQJhAZleAhsM +AAoJEKLFC0DOPPKXAAEP/jK7ch9GkoaYlsuqY/aHtxEwVddUDOxjyn3FMDoln85L +/n8AmLQb2bcpKSqpaJwMbmfEyr5MDm8xnsBTfx3u6kgaLOWfKxjLQ6PM7kgIMdi4 +bfaRRuSEI1/R6c/hNpiGnzAeeexldH1we+eH1IVmh4crdat49S2xh7Qlv9ahvgsP +LfKl3rJ+aaX/Ok0AHzhvSfhFpPr1gAaGeaRt+rhlZsx2QyG4Ez8p2nDAcAzPiB3T +73ENoBIX6mTPfPm1UgrRyFKBqtUzAodz66j3r6ebBlWzIRg8iZenVMAxzjINAsxN +w1Bzfgsi5ZespfsSlmEaa7jJkqqDuEcLa2YuiFAue7Euqwz1aGeq1GfTicQioSCb +Ur/LGyz2Mj3ykbaP8p5mFVcUN51yQy6OcpvR/W1DfRT9SHFT/bCf9ixsjB2HlZGo +uxPJowwqmMgHd755ZzPDUM9YDgLI1yXdcYshObv3Wq537JAxnZJCGRK4Y8SwrMSh +8WRxlaM0AGWXiJFIDD4bQPIdnF3X8w0cGWE5Otkb8mMHOT+rFTVlDODwm1zF6oIG +PTwfVrpiZBwiUtfJol1exr/MzSPyGoJnYs3cRf2E3O+D1LbcR8w0LbjGuUy38Piz +ZO/vCeyJ3JZC5kE8nD+XBA4idwzh0BKEfH9t+WchQ3Up9rxyzLyQamoqt5Xby4pY +=xkM3 +-----END PGP PUBLIC KEY BLOCK----- +``` diff --git 
a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..8ce206c849 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,20 @@ +# What ❔ + + + + + +## Why ❔ + + + + +## Checklist + + + + +- [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). +- [ ] Tests for the changes have been added / updated. +- [ ] Documentation comments have been added / updated. +- [ ] Code has been formatted via `zk fmt` and `zk lint`. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f129e606f7..dd3d458429 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,44 @@ # Contribution Guidelines -Thank you for considering helping out with the source code! We are extremely grateful for any consideration of -contributions to this repository. However, at this time, we generally do not accept external contributions. This policy -will change in the future, so please check back regularly for updates. +Hello! Thanks for your interest in joining the mission to accelerate the mass adoption of crypto for personal +sovereignty! We welcome contributions from anyone on the internet, and are grateful for even the smallest of fixes! -For security issues, please contact us at [security@matterlabs.dev](mailto:security@matterlabs.dev). +## Ways to contribute -Thank you for your support in accelerating the mass adoption of crypto for personal sovereignty! +There are many ways to contribute to the ZK Stack: + +1. Open issues: if you find a bug, have something you believe needs to be fixed, or have an idea for a feature, please + open an issue. +2. Add color to existing issues: provide screenshots, code snippets, and whatever you think would be helpful to resolve + issues. +3. Resolve issues: either by showing an issue isn't a problem and the current state is ok as is or by fixing the problem + and opening a PR. +4. Report security issues, see [our security policy](./github/SECURITY.md). +5. 
[Join the team!](https://matterlabs.notion.site/Shape-the-future-of-Ethereum-at-Matter-Labs-dfb3b5a037044bb3a8006af2eb0575e0) + +## Fixing issues + +To contribute code fixing issues, please fork the repo, fix an issue, commit, add documentation as per the PR template, +and the repo's maintainers will review the PR. +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) +for guidance how to work with PRs created from a fork. + +## Licenses + +If you contribute to this project, your contributions will be made to the project under both Apache 2.0 and the MIT +license. + +## Resources + +We aim to make it as easy as possible to contribute to the mission. This is still WIP, and we're happy for contributions +and suggestions here too. Some resources to help: + +1. [In-repo docs aimed at developers](docs) +2. [zkSync Era docs!](https://era.zksync.io/docs/) +3. Company links can be found in the [repo's readme](README.md) + +## Code of Conduct + +Be polite and respectful. 
+ +### Thank you From 06e8353b8b18f983a27e468567e996dd20f9bf10 Mon Sep 17 00:00:00 2001 From: Stanislav Breadless Date: Mon, 18 Sep 2023 15:56:47 +0200 Subject: [PATCH 04/11] boojum integration --- .../contracts/common/L2ContractAddresses.sol | 3 + ethereum/contracts/zksync/Config.sol | 3 - ethereum/contracts/zksync/DiamondInit.sol | 4 + ethereum/contracts/zksync/facets/Executor.sol | 159 ++--- ethereum/contracts/zksync/facets/Mailbox.sol | 7 +- .../contracts/zksync/interfaces/IExecutor.sol | 37 +- .../test/unit_tests/executor_test.spec.ts | 561 +++++++----------- .../unit_tests/l1_erc20_bridge_test.spec.ts | 2 +- .../test/unit_tests/l2-upgrade.test.spec.ts | 227 +++++-- ethereum/test/unit_tests/utils.ts | 67 ++- .../validator_timelock_test.spec.ts | 8 +- ethereum/yarn.lock | 12 +- zksync/yarn.lock | 8 +- 13 files changed, 570 insertions(+), 528 deletions(-) diff --git a/ethereum/contracts/common/L2ContractAddresses.sol b/ethereum/contracts/common/L2ContractAddresses.sol index 2c5096f2d9..be0cfe792c 100644 --- a/ethereum/contracts/common/L2ContractAddresses.sol +++ b/ethereum/contracts/common/L2ContractAddresses.sol @@ -25,3 +25,6 @@ address constant L2_KNOWN_CODE_STORAGE_SYSTEM_CONTRACT_ADDR = address(0x8004); /// @dev The address of the context system contract address constant L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR = address(0x800b); + +/// @dev The address of the bytecode compressor system contract +address constant L2_BYTECODE_COMPRESSOR_SYSTEM_CONTRACT_ADDR = address(0x800e); diff --git a/ethereum/contracts/zksync/Config.sol b/ethereum/contracts/zksync/Config.sol index a7652d1b9b..0ae4291b45 100644 --- a/ethereum/contracts/zksync/Config.sol +++ b/ethereum/contracts/zksync/Config.sol @@ -12,9 +12,6 @@ uint256 constant L2_TO_L1_LOG_SERIALIZE_SIZE = 88; /// @dev The maximum length of the bytes array with L2 -> L1 logs uint256 constant MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES = 4 + L2_TO_L1_LOG_SERIALIZE_SIZE * 512; -/// @dev L2 -> L1 logs Merkle tree height -uint256 
constant L2_TO_L1_LOG_MERKLE_TREE_HEIGHT = 9; - /// @dev The value of default leaf hash for L2 -> L1 logs Merkle tree /// @dev An incomplete fixed-size tree is filled with this value to be a full binary tree /// @dev Actually equal to the `keccak256(new bytes(L2_TO_L1_LOG_SERIALIZE_SIZE))` diff --git a/ethereum/contracts/zksync/DiamondInit.sol b/ethereum/contracts/zksync/DiamondInit.sol index d9fb9021de..eff654b4ff 100644 --- a/ethereum/contracts/zksync/DiamondInit.sol +++ b/ethereum/contracts/zksync/DiamondInit.sol @@ -67,6 +67,10 @@ contract DiamondInit is Base { s.l2DefaultAccountBytecodeHash = _l2DefaultAccountBytecodeHash; s.priorityTxMaxGasLimit = _priorityTxMaxGasLimit; + // While this does not provide a protection in the production, it is needed for local testing + // Length of the L2Log encoding should not be equal to the length of other L2Logs' tree nodes preimages + assert(L2_TO_L1_LOG_SERIALIZE_SIZE != 2 * 32); + return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE; } } diff --git a/ethereum/contracts/zksync/facets/Executor.sol b/ethereum/contracts/zksync/facets/Executor.sol index 729c3d53b9..e051ff0fb0 100644 --- a/ethereum/contracts/zksync/facets/Executor.sol +++ b/ethereum/contracts/zksync/facets/Executor.sol @@ -4,7 +4,7 @@ pragma solidity ^0.8.13; import {Base} from "./Base.sol"; import {COMMIT_TIMESTAMP_NOT_OLDER, COMMIT_TIMESTAMP_APPROXIMATION_DELTA, EMPTY_STRING_KECCAK, L2_TO_L1_LOG_SERIALIZE_SIZE, INPUT_MASK, MAX_INITIAL_STORAGE_CHANGES_COMMITMENT_BYTES, MAX_REPEATED_STORAGE_CHANGES_COMMITMENT_BYTES, MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, PACKED_L2_BLOCK_TIMESTAMP_MASK} from "../Config.sol"; -import {IExecutor} from "../interfaces/IExecutor.sol"; +import {IExecutor, L2_LOG_ADDRESS_OFFSET, L2_LOG_KEY_OFFSET, L2_LOG_VALUE_OFFSET, SystemLogKey} from "../interfaces/IExecutor.sol"; import {PairingsBn254} from "../libraries/PairingsBn254.sol"; import {PriorityQueue, PriorityOperation} from "../libraries/PriorityQueue.sol"; import {UncheckedMath} from 
"../../common/libraries/UncheckedMath.sol"; @@ -37,6 +37,8 @@ contract ExecutorFacet is Base, IExecutor { uint256 expectedNumberOfLayer1Txs, bytes32 expectedPriorityOperationsHash, bytes32 previousBlockHash, + bytes32 stateDiffHash, + bytes32 l2LogsTreeRoot, uint256 packedBatchAndL2BlockTimestamp ) = _processL2Logs(_newBlock, _expectedSystemContractUpgradeTxHash); @@ -49,19 +51,8 @@ contract ExecutorFacet is Base, IExecutor { // Check the timestamp of the new block _verifyBlockTimestamp(packedBatchAndL2BlockTimestamp, _newBlock.timestamp, _previousBlock.timestamp); - // Preventing "stack too deep error" - { - // Check the index of repeated storage writes - uint256 newStorageChangesIndexes = uint256(uint32(bytes4(_newBlock.initialStorageChanges[:4]))); - require( - _previousBlock.indexRepeatedStorageChanges + newStorageChangesIndexes == - _newBlock.indexRepeatedStorageChanges, - "yq" - ); - } - // Create block commitment for the proof verification - bytes32 commitment = _createBlockCommitment(_newBlock); + bytes32 commitment = _createBlockCommitment(_newBlock, stateDiffHash); return StoredBlockInfo( @@ -70,7 +61,7 @@ contract ExecutorFacet is Base, IExecutor { _newBlock.indexRepeatedStorageChanges, _newBlock.numberOfLayer1Txs, _newBlock.priorityOperationsHash, - _newBlock.l2LogsTreeRoot, + l2LogsTreeRoot, _newBlock.timestamp, commitment ); @@ -91,7 +82,7 @@ contract ExecutorFacet is Base, IExecutor { // While the fact that _previousBatchTimestamp < batchTimestamp is already checked on L2, // we double check it here for clarity - require(_previousBatchTimestamp < batchTimestamp, "h"); + require(_previousBatchTimestamp < batchTimestamp, "h3"); uint256 lastL2BlockTimestamp = _packedBatchAndL2BlockTimestamp & PACKED_L2_BLOCK_TIMESTAMP_MASK; @@ -104,6 +95,9 @@ contract ExecutorFacet is Base, IExecutor { } /// @dev Check that L2 logs are proper and block contain all meta information for them + /// @dev The logs processed here should line up such that only one log for each 
key from the + /// SystemLogKey enum in Constants.sol is processed per new block. + /// @dev Data returned from here will be used to form the block commitment. function _processL2Logs(CommitBlockInfo calldata _newBlock, bytes32 _expectedSystemContractUpgradeTxHash) internal pure @@ -111,66 +105,69 @@ contract ExecutorFacet is Base, IExecutor { uint256 numberOfLayer1Txs, bytes32 chainedPriorityTxsHash, bytes32 previousBlockHash, + bytes32 stateDiffHash, + bytes32 l2LogsTreeRoot, uint256 packedBatchAndL2BlockTimestamp ) { // Copy L2 to L1 logs into memory. - bytes memory emittedL2Logs = _newBlock.l2Logs[4:]; - uint256 currentMessage; - // Auxiliary variable that is needed to enforce that `previousBlockHash` and `blockTimestamp` was read exactly one time - bool isSystemContextLogProcessed; - bytes[] calldata factoryDeps = _newBlock.factoryDeps; - uint256 currentBytecode; + bytes memory emittedL2Logs = _newBlock.systemLogs[4:]; + + // Used as bitmap to set/check log processing happens exactly once. + // See SystemLogKey enum in Constants.sol for ordering. 
+ uint256 processedLogs; - chainedPriorityTxsHash = EMPTY_STRING_KECCAK; + bytes32 providedL2ToL1PubdataHash = keccak256(_newBlock.totalL2ToL1Pubdata); // linear traversal of the logs for (uint256 i = 0; i < emittedL2Logs.length; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) { - (address logSender, ) = UnsafeBytes.readAddress(emittedL2Logs, i + 4); - - // show preimage for hashed message stored in log - if (logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR) { - (bytes32 hashedMessage, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 56); - require(keccak256(_newBlock.l2ArbitraryLengthMessages[currentMessage]) == hashedMessage, "k2"); - - currentMessage = currentMessage.uncheckedInc(); - } else if (logSender == L2_BOOTLOADER_ADDRESS) { - (bytes32 canonicalTxHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 24); - - if (_expectedSystemContractUpgradeTxHash != bytes32(0)) { - require(_expectedSystemContractUpgradeTxHash == canonicalTxHash, "bz"); - _expectedSystemContractUpgradeTxHash = bytes32(0); - } else { - chainedPriorityTxsHash = keccak256(abi.encode(chainedPriorityTxsHash, canonicalTxHash)); - // Overflow is not realistic - numberOfLayer1Txs = numberOfLayer1Txs.uncheckedInc(); - } - } else if (logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR) { - // Make sure that the system context log wasn't processed yet, to - // avoid accident double reading `blockTimestamp` and `previousBlockHash` - require(!isSystemContextLogProcessed, "fx"); - (packedBatchAndL2BlockTimestamp, ) = UnsafeBytes.readUint256(emittedL2Logs, i + 24); - (previousBlockHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 56); - // Mark system context log as processed - isSystemContextLogProcessed = true; - } else if (logSender == L2_KNOWN_CODE_STORAGE_SYSTEM_CONTRACT_ADDR) { - (bytes32 bytecodeHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 24); - require(bytecodeHash == L2ContractHelper.hashL2Bytecode(factoryDeps[currentBytecode]), "k3"); - - currentBytecode = 
currentBytecode.uncheckedInc(); + // Extract the values to be compared to/used such as the log sender, key, and value + (address logSender, ) = UnsafeBytes.readAddress(emittedL2Logs, i + L2_LOG_ADDRESS_OFFSET); + (uint256 logKey, ) = UnsafeBytes.readUint256(emittedL2Logs, i + L2_LOG_KEY_OFFSET); + (bytes32 logValue, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + L2_LOG_VALUE_OFFSET); + + // Ensure that the log hasn't been processed already + require(!_checkBit(processedLogs, uint8(logKey)), "kp"); + processedLogs = _setBit(processedLogs, uint8(logKey)); + + // Need to check that each log was sent by the correct address. + if (logKey == uint256(SystemLogKey.L2_TO_L1_LOGS_TREE_ROOT_KEY)) { + require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "lm"); + l2LogsTreeRoot = logValue; + } else if (logKey == uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY)) { + require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "ln"); + require(providedL2ToL1PubdataHash == logValue, "wp"); + } else if (logKey == uint256(SystemLogKey.STATE_DIFF_HASH_KEY)) { + require(logSender == L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, "lb"); + stateDiffHash = logValue; + } else if (logKey == uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)) { + require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sc"); + packedBatchAndL2BlockTimestamp = uint256(logValue); + } else if (logKey == uint256(SystemLogKey.PREV_BLOCK_HASH_KEY)) { + require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sv"); + previousBlockHash = logValue; + } else if (logKey == uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY)) { + require(logSender == L2_BOOTLOADER_ADDRESS, "bl"); + chainedPriorityTxsHash = logValue; + } else if (logKey == uint256(SystemLogKey.NUMBER_OF_LAYER_1_TXS_KEY)) { + require(logSender == L2_BOOTLOADER_ADDRESS, "bk"); + numberOfLayer1Txs = uint256(logValue); + } else if (logKey == uint256(SystemLogKey.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH)) { + require(logSender == 
L2_BOOTLOADER_ADDRESS, "bu"); + require(_expectedSystemContractUpgradeTxHash == logValue, "ut"); } else { - // Only some system contracts could send raw logs from L2 to L1, double check that invariant holds here. - revert("ne"); + revert("ul"); } } - // To check that only relevant preimages have been included in the calldata - require(currentBytecode == factoryDeps.length, "ym"); - require(currentMessage == _newBlock.l2ArbitraryLengthMessages.length, "pl"); - // `blockTimestamp` and `previousBlockHash` wasn't read from L2 logs - require(isSystemContextLogProcessed, "by"); - - // Making sure that the system contract upgrade was included if needed - require(_expectedSystemContractUpgradeTxHash == bytes32(0), "bw"); + + // We only require 7 logs to be checked, the 8th is if we are expecting a protocol upgrade + // Without the protocol upgrade we expect 7 logs: 2^7 - 1 = 127 + // With the protocol upgrade we expect 8 logs: 2^8 - 1 = 255 + if (_expectedSystemContractUpgradeTxHash == bytes32(0)) { + require(processedLogs == 127, "b7"); + } else { + require(processedLogs == 255, "b8"); + } } /// @notice Commit block @@ -461,10 +458,14 @@ contract ExecutorFacet is Base, IExecutor { } /// @dev Creates block commitment from its data - function _createBlockCommitment(CommitBlockInfo calldata _newBlockData) internal view returns (bytes32) { + function _createBlockCommitment(CommitBlockInfo calldata _newBlockData, bytes32 _stateDiffHash) + internal + view + returns (bytes32) + { bytes32 passThroughDataHash = keccak256(_blockPassThroughData(_newBlockData)); bytes32 metadataHash = keccak256(_blockMetaParameters()); - bytes32 auxiliaryOutputHash = keccak256(_blockAuxiliaryOutput(_newBlockData)); + bytes32 auxiliaryOutputHash = keccak256(_blockAuxiliaryOutput(_newBlockData, _stateDiffHash)); return keccak256(abi.encode(passThroughDataHash, metadataHash, auxiliaryOutputHash)); } @@ -483,20 +484,30 @@ contract ExecutorFacet is Base, IExecutor { return 
abi.encodePacked(s.zkPorterIsAvailable, s.l2BootloaderBytecodeHash, s.l2DefaultAccountBytecodeHash); } - function _blockAuxiliaryOutput(CommitBlockInfo calldata _block) internal pure returns (bytes memory) { - require(_block.initialStorageChanges.length <= MAX_INITIAL_STORAGE_CHANGES_COMMITMENT_BYTES, "pf"); - require(_block.repeatedStorageChanges.length <= MAX_REPEATED_STORAGE_CHANGES_COMMITMENT_BYTES, "py"); - require(_block.l2Logs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu"); + function _blockAuxiliaryOutput(CommitBlockInfo calldata _block, bytes32 _stateDiffHash) + internal + pure + returns (bytes memory) + { + require(_block.systemLogs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu"); - bytes32 initialStorageChangesHash = keccak256(_block.initialStorageChanges); - bytes32 repeatedStorageChangesHash = keccak256(_block.repeatedStorageChanges); - bytes32 l2ToL1LogsHash = keccak256(_block.l2Logs); + bytes32 l2ToL1LogsHash = keccak256(_block.systemLogs); - return abi.encode(_block.l2LogsTreeRoot, l2ToL1LogsHash, initialStorageChangesHash, repeatedStorageChangesHash); + return abi.encode(l2ToL1LogsHash, _stateDiffHash); } /// @notice Returns the keccak hash of the ABI-encoded StoredBlockInfo function _hashStoredBlockInfo(StoredBlockInfo memory _storedBlockInfo) internal pure returns (bytes32) { return keccak256(abi.encode(_storedBlockInfo)); } + + /// @notice Returns if the bit at index {_index} is 1 + function _checkBit(uint256 _bitMap, uint8 _index) internal pure returns (bool) { + return (_bitMap & (1 << _index)) > 0; + } + + /// @notice Sets the given bit in {_num} at index {_index} to 1. 
+ function _setBit(uint256 _num, uint8 _index) internal pure returns (uint256) { + return _num | (1 << _index); + } } diff --git a/ethereum/contracts/zksync/facets/Mailbox.sol b/ethereum/contracts/zksync/facets/Mailbox.sol index d2a59c9821..2718fc0f74 100644 --- a/ethereum/contracts/zksync/facets/Mailbox.sol +++ b/ethereum/contracts/zksync/facets/Mailbox.sol @@ -119,9 +119,10 @@ contract MailboxFacet is Base, IMailbox { // Check that hashed log is not the default one, // otherwise it means that the value is out of range of sent L2 -> L1 logs require(hashedLog != L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, "tw"); - // Check that the proof length is exactly the same as tree height, to prevent - // any shorter/longer paths attack on the Merkle path validation - require(_proof.length == L2_TO_L1_LOG_MERKLE_TREE_HEIGHT, "rz"); + + // It is ok to not check length of `_proof` array, as length + // of leaf preimage (which is `L2_TO_L1_LOG_SERIALIZE_SIZE`) is not + // equal to the length of other nodes preimages (which are `2 * 32`) bytes32 calculatedRootHash = Merkle.calculateRoot(_proof, _index, hashedLog); bytes32 actualRootHash = s.l2LogsRootHashes[_blockNumber]; diff --git a/ethereum/contracts/zksync/interfaces/IExecutor.sol b/ethereum/contracts/zksync/interfaces/IExecutor.sol index 5fb7a5ffa7..7c6821d316 100644 --- a/ethereum/contracts/zksync/interfaces/IExecutor.sol +++ b/ethereum/contracts/zksync/interfaces/IExecutor.sol @@ -4,6 +4,27 @@ pragma solidity ^0.8.13; import "./IBase.sol"; +/// @dev Enum used by L2 System Contracts to differentiate logs. +enum SystemLogKey { + L2_TO_L1_LOGS_TREE_ROOT_KEY, + TOTAL_L2_TO_L1_PUBDATA_KEY, + STATE_DIFF_HASH_KEY, + PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + PREV_BLOCK_HASH_KEY, + CHAINED_PRIORITY_TXN_HASH_KEY, + NUMBER_OF_LAYER_1_TXS_KEY, + EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH +} + +/// @dev Offset used to pull Address From Log. 
Equal to 4 (bytes for isService) +uint256 constant L2_LOG_ADDRESS_OFFSET = 4; + +/// @dev Offset used to pull Key From Log. Equal to 4 (bytes for isService) + 20 (bytes for address) +uint256 constant L2_LOG_KEY_OFFSET = 24; + +/// @dev Offset used to pull Value From Log. Equal to 4 (bytes for isService) + 20 (bytes for address) + 32 (bytes for key) +uint256 constant L2_LOG_VALUE_OFFSET = 56; + interface IExecutor is IBase { /// @notice Rollup block stored data /// @param blockNumber Rollup block number @@ -31,26 +52,18 @@ interface IExecutor is IBase { /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more /// @param newStateRoot The state root of the full state tree /// @param numberOfLayer1Txs Number of priority operations to be processed - /// @param l2LogsTreeRoot The root hash of the tree that contains all L2 -> L1 logs in the block /// @param priorityOperationsHash Hash of all priority operations from this block - /// @param initialStorageChanges Storage write access as a concatenation key-value - /// @param repeatedStorageChanges Storage write access as a concatenation index-value - /// @param l2Logs concatenation of all L2 -> L1 logs in the block - /// @param l2ArbitraryLengthMessages array of hash preimages that were sent as value of L2 logs by special system L2 contract - /// @param factoryDeps array of l2 bytecodes that were marked as known on L2 + /// @param systemLogs concatenation of all L2 -> L1 system logs in the block + /// @param totalL2ToL1Pubdata Total pubdata committed to as part of bootloader run. 
Contents are: l2Tol1Logs <> l2Tol1Messages <> publishedBytecodes <> stateDiffs struct CommitBlockInfo { uint64 blockNumber; uint64 timestamp; uint64 indexRepeatedStorageChanges; bytes32 newStateRoot; uint256 numberOfLayer1Txs; - bytes32 l2LogsTreeRoot; bytes32 priorityOperationsHash; - bytes initialStorageChanges; - bytes repeatedStorageChanges; - bytes l2Logs; - bytes[] l2ArbitraryLengthMessages; - bytes[] factoryDeps; + bytes systemLogs; + bytes totalL2ToL1Pubdata; } /// @notice Recursive proof input data (individual commitments are constructed onchain) diff --git a/ethereum/test/unit_tests/executor_test.spec.ts b/ethereum/test/unit_tests/executor_test.spec.ts index 99f3fe1b3a..7814cbe05c 100644 --- a/ethereum/test/unit_tests/executor_test.spec.ts +++ b/ethereum/test/unit_tests/executor_test.spec.ts @@ -18,9 +18,10 @@ import { AccessMode, EMPTY_STRING_KECCAK, L2_BOOTLOADER_ADDRESS, - L2_KNOWN_CODE_STORAGE_ADDRESS, L2_SYSTEM_CONTEXT_ADDRESS, - L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS, + constructL2Log, + createSystemLogs, genesisStoredBlockInfo, getCallRevertReason, packBatchTimestampAndBlockTimestamp, @@ -40,6 +41,7 @@ describe(`Executor tests`, function () { let currentTimestamp: number; let newCommitBlockInfo: any; let newStoredBlockInfo: any; + let logs: any; const proofInput = { recursiveAggregationInput: [], @@ -134,13 +136,9 @@ describe(`Executor tests`, function () { indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, - l2LogsTreeRoot: ethers.utils.randomBytes(32), priorityOperationsHash: ethers.utils.randomBytes(32), - initialStorageChanges: `0x`, - repeatedStorageChanges: `0x`, - l2Logs: `0x`, - l2ArbitraryLengthMessages: [], - factoryDeps: [] + systemLogs: `0x`, + totalL2ToL1Pubdata: `0x` }; it(`Should revert on committing by unauthorised address`, async () => { @@ -150,7 +148,7 @@ describe(`Executor tests`, function () { expect(revertReason).equal(`1h`); }); - it(`Should revert on committing by unauthorised 
address`, async () => { + it(`Should revert on proving by unauthorised address`, async () => { const revertReason = await getCallRevertReason( executor.connect(randomSigner).proveBlocks(storedBlockInfo, [storedBlockInfo], proofInput) ); @@ -168,19 +166,16 @@ describe(`Executor tests`, function () { describe(`Commiting functionality`, async function () { before(async () => { currentTimestamp = (await hardhat.ethers.providers.getDefaultProvider().getBlock(`latest`)).timestamp; + logs = ethers.utils.hexConcat([`0x00000007`].concat(createSystemLogs())); newCommitBlockInfo = { blockNumber: 1, timestamp: currentTimestamp, indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, - l2LogsTreeRoot: ethers.constants.HashZero, priorityOperationsHash: EMPTY_STRING_KECCAK, - initialStorageChanges: `0x00000000`, - repeatedStorageChanges: `0x`, - l2Logs: `0x`, - l2ArbitraryLengthMessages: [], - factoryDeps: [] + systemLogs: logs, + totalL2ToL1Pubdata: ethers.constants.HashZero }; }); @@ -205,17 +200,17 @@ describe(`Executor tests`, function () { }); it(`Should revert on committing with wrong new block timestamp`, async () => { - const wrongNewBlockTimestamp = ethers.utils.randomBytes(32); // correct value is 0 - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + const wrongNewBlockTimestamp = ethers.utils.hexValue(ethers.utils.randomBytes(32)); // correct value is 0 + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - wrongNewBlockTimestamp, - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + wrongNewBlockTimestamp.toString() + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const 
revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) @@ -224,17 +219,20 @@ describe(`Executor tests`, function () { }); it(`Should revert on committing with too small new block timestamp`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + const wrongNewBlockTimestamp = 1; // too small + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(1, 1), - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + ethers.utils.hexlify( + packBatchTimestampAndBlockTimestamp(wrongNewBlockTimestamp, wrongNewBlockTimestamp) + ) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.timestamp = 1; // too small + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + wrongNewCommitBlockInfo.timestamp = wrongNewBlockTimestamp; const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) @@ -243,18 +241,18 @@ describe(`Executor tests`, function () { }); it(`Should revert on committing with too big last L2 block timestamp`, async () => { - const wrongL2BlockTimestamp = parseInt('0xffffffff'); - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + const wrongNewBlockTimestamp = `0xffffffff`; // too big + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(wrongL2BlockTimestamp, wrongL2BlockTimestamp), - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + 
packBatchTimestampAndBlockTimestamp(wrongNewBlockTimestamp, wrongNewBlockTimestamp) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.timestamp = wrongL2BlockTimestamp; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + wrongNewCommitBlockInfo.timestamp = parseInt(wrongNewBlockTimestamp); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) @@ -264,16 +262,16 @@ describe(`Executor tests`, function () { it(`Should revert on committing with wrong previous blockhash`, async () => { const wrongPreviousBlockHash = ethers.utils.randomBytes(32); // correct value is bytes32(0) - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - wrongPreviousBlockHash - ]); + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(wrongPreviousBlockHash) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) @@ -282,74 +280,70 @@ describe(`Executor tests`, function () { }); it(`Should revert on committing without processing system context log`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([`0x00000000`]); + var wrongL2Logs = createSystemLogs(); + delete wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY]; const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - 
wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000006`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) ); - expect(revertReason).equal(`by`); + expect(revertReason).equal(`b7`); }); it(`Should revert on committing with processing system context log twice`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero - ]); + var wrongL2Logs = createSystemLogs(); + wrongL2Logs.push( + constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + ethers.constants.HashZero + ) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) ); - expect(revertReason).equal(`fx`); + expect(revertReason).equal(`kp`); }); it('Should revert on unexpected L2->L1 log', async () => { // We do not expect to receive an L2->L1 log from zero address const unexpectedAddress = ethers.constants.AddressZero; - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, unexpectedAddress, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), + 
SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, ethers.constants.HashZero - ]); + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) ); - expect(revertReason).equal(`ne`); + expect(revertReason).equal(`sc`); }); it(`Should revert on committing with wrong canonical tx hash`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, + var wrongChainedPriorityHash = ethers.utils.randomBytes(32); + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY] = constructL2Log( + true, L2_BOOTLOADER_ADDRESS, - ethers.utils.randomBytes(32), //wrong canonical tx hash - ethers.utils.hexZeroPad(`0x01`, 32) - ]); + SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY, + ethers.utils.hexlify(wrongChainedPriorityHash) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) @@ -358,26 +352,16 @@ describe(`Executor tests`, function () { }); it(`Should revert on committing with wrong number of layer 1 TXs`, async () => { - const arbitraryCanonicalTxHash = ethers.utils.randomBytes(32); - const chainedPriorityTxHash = ethers.utils.keccak256( - ethers.utils.hexConcat([EMPTY_STRING_KECCAK, arbitraryCanonicalTxHash]) - ); - - const wrongL2Logs = 
ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, + var wrongL2Logs = createSystemLogs(); + wrongL2Logs[SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY] = constructL2Log( + true, L2_BOOTLOADER_ADDRESS, - arbitraryCanonicalTxHash, - ethers.utils.hexZeroPad(`0x01`, 32) - ]); + SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, + ethers.utils.hexlify(0x01) + ); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.priorityOperationsHash = chainedPriorityTxHash; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); wrongNewCommitBlockInfo.numberOfLayer1Txs = 2; // wrong number const revertReason = await getCallRevertReason( @@ -386,256 +370,71 @@ describe(`Executor tests`, function () { expect(revertReason).equal(`ta`); }); - it(`Should revert on committing with wrong factory deps data`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_KNOWN_CODE_STORAGE_ADDRESS, - ethers.utils.randomBytes(32) - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.factoryDeps = [ethers.utils.randomBytes(32)]; - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`k3`); - }); - - it(`Should revert on committing with wrong factory deps array length`, async () => { - const arbitraryBytecode = ethers.utils.randomBytes(32); - const arbitraryBytecodeHash = ethers.utils.sha256(arbitraryBytecode); - const 
arbitraryBytecodeHashManipulated1 = BigNumber.from(arbitraryBytecodeHash).and( - `0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF` - ); - const arbitraryBytecodeHashManipulated2 = BigNumber.from(arbitraryBytecodeHashManipulated1).or( - `0x0100000100000000000000000000000000000000000000000000000000000000` - ); - - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_KNOWN_CODE_STORAGE_ADDRESS, - ethers.utils.hexlify(arbitraryBytecodeHashManipulated2) - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.factoryDeps = [arbitraryBytecode, arbitraryBytecode]; - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`ym`); - }); - - it(`Should revert on committing with wrong hashed message`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_TO_L1_MESSENGER, - ethers.constants.HashZero, - ethers.utils.randomBytes(32) - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.l2ArbitraryLengthMessages = [ethers.utils.randomBytes(32)]; - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`k2`); - }); - - it(`Should revert on committing with wrong number of messages`, async () => { - const arbitraryMessage = `0xaa`; - const arbitraryHashedMessage = 
ethers.utils.keccak256(arbitraryMessage); - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_TO_L1_MESSENGER, - ethers.constants.HashZero, - arbitraryHashedMessage - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.l2ArbitraryLengthMessages = [arbitraryMessage, arbitraryMessage]; // wrong number - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`pl`); - }); - - it(`Should revert on committing with wrong bytecode length`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_KNOWN_CODE_STORAGE_ADDRESS, - ethers.utils.randomBytes(32) - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.factoryDeps = [ethers.utils.randomBytes(20)]; - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal('bl'); - }); - - it(`Should revert on committing with wrong number of words in the bytecode`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - ethers.utils.hexZeroPad(ethers.utils.hexlify(currentTimestamp), 32), - ethers.constants.HashZero, - `0x00010000`, - L2_KNOWN_CODE_STORAGE_ADDRESS, - ethers.utils.randomBytes(32) - ]); - - const wrongNewCommitBlockInfo = Object.assign({}, 
newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.factoryDeps = [ethers.utils.randomBytes(64)]; - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`pr`); - }); - - it(`Should revert on committing with wrong reapeated storage writes`, async () => { - const wrongL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); + it(`Should revert on committing with unknown system log key`, async () => { + var wrongL2Logs = createSystemLogs(); + wrongL2Logs.push(constructL2Log(true, L2_SYSTEM_CONTEXT_ADDRESS, 119, ethers.constants.HashZero)); const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.indexRepeatedStorageChanges = 0; // wrong value, it should be 1 - wrongNewCommitBlockInfo.initialStorageChanges = `0x00000001`; + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) ); - expect(revertReason).equal(`yq`); + expect(revertReason).equal(`ul`); }); - it(`Should revert on committing with too long L2 logs`, async () => { - // uint256 constant MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES = 4 + L2_TO_L1_LOG_SERIALIZE_SIZE * 512; - const arr1 = Array(512) - .fill([`0x00000000`, L2_TO_L1_MESSENGER, ethers.constants.HashZero, ethers.utils.keccak256('0x')]) - .flat(); + it(`Should revert for system log from incorrect address`, async () => { + var tests = [ + [ethers.constants.HashZero, 'lm'], + [`0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563`, 'ln'], + [ethers.constants.HashZero, 
'lb'], + [ethers.constants.HashZero, 'sc'], + [ethers.constants.HashZero, 'sv'], + [EMPTY_STRING_KECCAK, 'bl'], + [ethers.constants.HashZero, 'bk'] + ]; - const arr2 = [ - `0x00000001`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ].concat(arr1); - - const wrongL2Logs = ethers.utils.hexConcat(arr2); + for (var i = 0; i < tests.length; i++) { + var wrongL2Logs = createSystemLogs(); + var wrong_addr = ethers.utils.hexlify(ethers.utils.randomBytes(20)); + wrongL2Logs[i] = constructL2Log(true, wrong_addr, i, tests[i][0]); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = wrongL2Logs; - wrongNewCommitBlockInfo.l2ArbitraryLengthMessages = Array(512).fill('0x'); + const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`pu`); + const revertReason = await getCallRevertReason( + executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + ); + expect(revertReason).equal(tests[i][1]); + } }); - it(`Should revert on committing with too long reapeated storage changes`, async () => { - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); - - // uint256 constant MAX_REPEATED_STORAGE_CHANGES_COMMITMENT_BYTES = 4 + REPEATED_STORAGE_CHANGE_SERIALIZE_SIZE * 7564; - const arr1 = Array(7565).fill(ethers.utils.randomBytes(40)).flat(); - const arr2 = [`0x00000000`].concat(arr1); - const wrongRepeatedStorageChanges = 
ethers.utils.hexConcat(arr2); + it(`Should revert for system log missing`, async () => { + for (var i = 0; i < 7; i++) { + var l2Logs = createSystemLogs(); + delete l2Logs[i]; - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = correctL2Logs; - wrongNewCommitBlockInfo.repeatedStorageChanges = wrongRepeatedStorageChanges; // too long repeated storage changes + const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); + wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000006`].concat(l2Logs)); - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`py`); - }); - - it(`Should revert on committing with too long initial storage changes`, async () => { - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, - L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); - - // uint256 constant MAX_INITIAL_STORAGE_CHANGES_COMMITMENT_BYTES = 4 + INITIAL_STORAGE_CHANGE_SERIALIZE_SIZE * 4765; - const arr1 = Array(4766).fill(ethers.utils.randomBytes(64)); - const arr2 = [`0x00000000`].concat(arr1); - const wrongInitialStorageChanges = ethers.utils.hexConcat(arr2); - - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.l2Logs = correctL2Logs; - wrongNewCommitBlockInfo.initialStorageChanges = wrongInitialStorageChanges; // too long initial storage changes - - const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) - ); - expect(revertReason).equal(`pf`); + const revertReason = await getCallRevertReason( + executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + ); + expect(revertReason).equal('b7'); + } 
}); it(`Should successfully commit a block`, async () => { - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + var correctL2Logs = createSystemLogs(); + correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); const commitTx = await executor .connect(validator) @@ -652,6 +451,20 @@ describe(`Executor tests`, function () { describe(`Proving functionality`, async function () { before(async () => { + // Reusing the old timestamp + currentTimestamp = newCommitBlockInfo.timestamp; + + newCommitBlockInfo = { + blockNumber: 1, + timestamp: currentTimestamp, + indexRepeatedStorageChanges: 0, + newStateRoot: ethers.utils.randomBytes(32), + numberOfLayer1Txs: 0, + priorityOperationsHash: EMPTY_STRING_KECCAK, + systemLogs: logs, + totalL2ToL1Pubdata: ethers.constants.HashZero + }; + newStoredBlockInfo = { blockNumber: 1, blockHash: newCommitedBlockBlockHash, @@ -694,18 +507,25 @@ describe(`Executor tests`, function () { }); it(`Should prove successfuly`, async () => { - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + var correctL2Logs = createSystemLogs(); + correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + 
packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); - await executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + var commitTx = await executor + .connect(validator) + .commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + + var result = await commitTx.wait(); + + newStoredBlockInfo.blockHash = result.events[0].args.blockHash; + newStoredBlockInfo.commitment = result.events[0].args.commitment; await executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [newStoredBlockInfo], proofInput); expect(await getters.getTotalBlocksVerified()).equal(1); @@ -755,20 +575,29 @@ describe(`Executor tests`, function () { ethers.utils.hexConcat([EMPTY_STRING_KECCAK, arbitraryCanonicalTxHash]) ); - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, + var correctL2Logs = createSystemLogs(); + correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero, - `0x00010000`, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + correctL2Logs[SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY] = constructL2Log( + true, L2_BOOTLOADER_ADDRESS, - arbitraryCanonicalTxHash, - ethers.utils.hexZeroPad(`0x01`, 32) - ]); + SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY, + chainedPriorityTxHash + ); + correctL2Logs[SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY] = constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, + '0x01' + ); const correctNewCommitBlockInfo = 
Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + correctNewCommitBlockInfo.priorityOperationsHash = chainedPriorityTxHash; correctNewCommitBlockInfo.numberOfLayer1Txs = 1; @@ -802,20 +631,28 @@ describe(`Executor tests`, function () { ethers.utils.hexConcat([EMPTY_STRING_KECCAK, arbitraryCanonicalTxHash]) ); - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000002`, - `0x00000000`, + var correctL2Logs = createSystemLogs(); + correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero, - `0x00010000`, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); + correctL2Logs[SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY] = constructL2Log( + true, L2_BOOTLOADER_ADDRESS, - arbitraryCanonicalTxHash, - ethers.utils.hexZeroPad(`0x01`, 32) - ]); + SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY, + chainedPriorityTxHash + ); + correctL2Logs[SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY] = constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, + '0x01' + ); const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); correctNewCommitBlockInfo.priorityOperationsHash = chainedPriorityTxHash; correctNewCommitBlockInfo.numberOfLayer1Txs = 1; @@ -875,16 +712,16 @@ describe(`Executor tests`, function () { }); it(`Should execute a block successfully`, async () => { - const correctL2Logs = ethers.utils.hexConcat([ - `0x00000001`, - `0x00000000`, + var correctL2Logs = createSystemLogs(); + 
correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, L2_SYSTEM_CONTEXT_ADDRESS, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp), - ethers.constants.HashZero - ]); + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + ); const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); await executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); await executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [newStoredBlockInfo], proofInput); diff --git a/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts b/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts index fbbb776d41..a3a8f5a3ac 100644 --- a/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts +++ b/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts @@ -190,7 +190,7 @@ describe(`L1ERC20Bridge tests`, function () { const revertReason = await getCallRevertReason( l1ERC20Bridge.connect(randomSigner).finalizeWithdrawal(0, 0, 0, l2ToL1message, []) ); - expect(revertReason).equal(`rz`); + expect(revertReason).equal(`xc`); }); it(`Should revert on finalizing a withdrawal with wrong proof`, async () => { diff --git a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts index edae977d78..b6cd631b1f 100644 --- a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts +++ b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts @@ -24,6 +24,10 @@ import { CommitBlockInfo, L2_SYSTEM_CONTEXT_ADDRESS, L2_BOOTLOADER_ADDRESS, + createSystemLogs, + SYSTEM_LOG_KEYS, + constructL2Log, + L2_TO_L1_MESSENGER, packBatchTimestampAndBlockTimestamp } from './utils'; import * as ethers from 'ethers'; @@ -476,59 +480,110 @@ describe('L2 
upgrade test', function () { const revertReason = await getCallRevertReason( proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoNoUpgradeTx]) ); - expect(revertReason).to.equal('bw'); + expect(revertReason).to.equal('b8'); }); it('Should ensure any additional upgrade logs go to the priority ops hash', async () => { if (!l2UpgradeTxHash) { throw new Error('Can not perform this test without l2UpgradeTxHash'); } - const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; - const l2Logs = encodeLogs([ - contextLog(timestamp, storedBlock1Info.blockHash), - bootloaderLog(l2UpgradeTxHash), - bootloaderLog(l2UpgradeTxHash) - ]); - const block2InfoNoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 2, - timestamp, - l2Logs - }); + const systemLogs = createSystemLogs(); + systemLogs.push( + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH, + l2UpgradeTxHash + ) + ); + systemLogs.push( + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH, + l2UpgradeTxHash + ) + ); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); + + const block2InfoNoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 2 + }, + systemLogs + ); const revertReason = await getCallRevertReason( proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoNoUpgradeTx]) ); - expect(revertReason).to.equal('t'); + expect(revertReason).to.equal('kp'); }); it('Should fail to commit when upgrade tx hash does not match', async () => { const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; - const l2Logs = encodeLogs([ - contextLog(timestamp, storedBlock1Info.blockHash), - bootloaderLog('0x' + '0'.repeat(64)) - ]); + const systemLogs = 
createSystemLogs(); + systemLogs.push( + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH, + ethers.constants.HashZero + ) + ); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 2, - timestamp, - l2Logs - }); + const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 2, + timestamp + }, + systemLogs + ); const revertReason = await getCallRevertReason( proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx]) ); - expect(revertReason).to.equal('bz'); + expect(revertReason).to.equal('ut'); }); it('Should commit successfully when the upgrade tx is present', async () => { const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; - const l2Logs = encodeLogs([contextLog(timestamp, storedBlock1Info.blockHash), bootloaderLog(l2UpgradeTxHash)]); + const systemLogs = createSystemLogs(); + systemLogs.push( + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH, + l2UpgradeTxHash + ) + ); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 2, - timestamp, - l2Logs - }); + const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 2, + timestamp + }, + systemLogs + ); await (await proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx])).wait(); @@ -538,13 +593,30 @@ describe('L2 upgrade test', function () { it('Should commit 
successfully when block was reverted and reupgraded', async () => { await (await proxyExecutor.revertBlocks(1)).wait(); const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; - const l2Logs = encodeLogs([contextLog(timestamp, storedBlock1Info.blockHash), bootloaderLog(l2UpgradeTxHash)]); + const systemLogs = createSystemLogs(); + systemLogs.push( + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH, + l2UpgradeTxHash + ) + ); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 2, - timestamp, - l2Logs - }); + const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 2, + timestamp + }, + systemLogs + ); const commitReceipt = await ( await proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx]) @@ -568,11 +640,23 @@ describe('L2 upgrade test', function () { ).wait(); const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; + const systemLogs = createSystemLogs(); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); + + const block3InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 3, + timestamp + }, + systemLogs + ); - const block3InfoTwoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 3, - timestamp - }); const commitReceipt = await ( await proxyExecutor.commitBlocks(storedBlock1Info, [block3InfoTwoUpgradeTx]) ).wait(); @@ -610,11 +694,23 @@ describe('L2 upgrade test', function () { }); const timestamp = (await 
hardhat.ethers.provider.getBlock('latest')).timestamp; + const systemLogs = createSystemLogs(); + systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, + ethers.utils.hexlify(storedBlock1Info.blockHash) + ); + + const block3InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( + storedBlock1Info, + { + blockNumber: 4, + timestamp + }, + systemLogs + ); - const block3InfoTwoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 4, - timestamp - }); const commitReceipt = await ( await proxyExecutor.commitBlocks(storedBlock1Info, [block3InfoTwoUpgradeTx]) ).wait(); @@ -676,18 +772,49 @@ async function buildCommitBlockInfo( info: CommitBlockInfoWithTimestamp ): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock('latest')).timestamp; + let systemLogs = createSystemLogs(); + systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(timestamp, timestamp) + ); + return { timestamp, indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, - l2LogsTreeRoot: ethers.constants.HashZero, priorityOperationsHash: EMPTY_STRING_KECCAK, - initialStorageChanges: `0x00000000`, - repeatedStorageChanges: `0x`, - l2Logs: encodeLogs([contextLog(timestamp, prevInfo.blockHash)]), - l2ArbitraryLengthMessages: [], - factoryDeps: [], + systemLogs: ethers.utils.hexConcat([`0x00000007`].concat(systemLogs)), + totalL2ToL1Pubdata: ethers.constants.HashZero, + ...info + }; +} + +async function buildCommitBlockInfoWithCustomLogs( + prevInfo: StoredBlockInfo, + info: CommitBlockInfoWithTimestamp, + systemLogs: string[] +): Promise { + const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock('latest')).timestamp; + 
systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + packBatchTimestampAndBlockTimestamp(timestamp, timestamp) + ); + + const size = systemLogs.length == 7 ? `0x00000007` : `0x00000008`; + + return { + timestamp, + indexRepeatedStorageChanges: 0, + newStateRoot: ethers.utils.randomBytes(32), + numberOfLayer1Txs: 0, + priorityOperationsHash: EMPTY_STRING_KECCAK, + systemLogs: ethers.utils.hexConcat([size].concat(systemLogs)), + totalL2ToL1Pubdata: ethers.constants.HashZero, ...info }; } @@ -699,7 +826,7 @@ function getBlockStoredInfo(commitInfo: CommitBlockInfo, commitment: string): St indexRepeatedStorageChanges: commitInfo.indexRepeatedStorageChanges, numberOfLayer1Txs: commitInfo.numberOfLayer1Txs, priorityOperationsHash: commitInfo.priorityOperationsHash, - l2LogsTreeRoot: commitInfo.l2LogsTreeRoot, + l2LogsTreeRoot: ethers.constants.HashZero, timestamp: commitInfo.timestamp, commitment: commitment }; diff --git a/ethereum/test/unit_tests/utils.ts b/ethereum/test/unit_tests/utils.ts index 791c7ea408..c9d4e308c2 100644 --- a/ethereum/test/unit_tests/utils.ts +++ b/ethereum/test/unit_tests/utils.ts @@ -10,6 +10,18 @@ export const L2_SYSTEM_CONTEXT_ADDRESS = `0x000000000000000000000000000000000000 export const L2_BOOTLOADER_ADDRESS = `0x0000000000000000000000000000000000008001`; export const L2_KNOWN_CODE_STORAGE_ADDRESS = `0x0000000000000000000000000000000000008004`; export const L2_TO_L1_MESSENGER = `0x0000000000000000000000000000000000008008`; +export const L2_BYTECODE_COMPRESSOR_ADDRESS = `0x000000000000000000000000000000000000800e`; + +export enum SYSTEM_LOG_KEYS { + L2_TO_L1_LOGS_TREE_ROOT_KEY, + TOTAL_L2_TO_L1_PUBDATA_KEY, + STATE_DIFF_HASH_KEY, + PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + PREV_BLOCK_HASH_KEY, + CHAINED_PRIORITY_TXN_HASH_KEY, + NUMBER_OF_LAYER_1_TXS_KEY, + EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH +} // The 
default price for the pubdata in L2 gas to be used in L1->L2 transactions export const REQUIRED_L2_GAS_PRICE_PER_PUBDATA = @@ -74,6 +86,48 @@ export async function requestExecute( ); } +export function constructL2Log(isService: boolean, sender: string, key: number | string, value: string) { + return ethers.utils.hexConcat([ + isService ? `0x0001` : `0x0000`, + `0x0000`, + sender, + ethers.utils.hexZeroPad(ethers.utils.hexlify(key), 32), + ethers.utils.hexZeroPad(ethers.utils.hexlify(value), 32) + ]); +} + +export function createSystemLogs() { + return [ + constructL2Log( + true, + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.L2_TO_L1_LOGS_TREE_ROOT_KEY, + ethers.constants.HashZero + ), + constructL2Log( + true, + L2_TO_L1_MESSENGER, + SYSTEM_LOG_KEYS.TOTAL_L2_TO_L1_PUBDATA_KEY, + `0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563` + ), + constructL2Log(true, L2_TO_L1_MESSENGER, SYSTEM_LOG_KEYS.STATE_DIFF_HASH_KEY, ethers.constants.HashZero), + constructL2Log( + true, + L2_SYSTEM_CONTEXT_ADDRESS, + SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + ethers.constants.HashZero + ), + constructL2Log(true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, ethers.constants.HashZero), + constructL2Log(true, L2_BOOTLOADER_ADDRESS, SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY, EMPTY_STRING_KECCAK), + constructL2Log( + true, + L2_BOOTLOADER_ADDRESS, + SYSTEM_LOG_KEYS.NUMBER_OF_LAYER_1_TXS_KEY, + ethers.constants.HashZero + ) + ]; +} + export function genesisStoredBlockInfo(): StoredBlockInfo { return { blockNumber: 0, @@ -89,7 +143,10 @@ export function genesisStoredBlockInfo(): StoredBlockInfo { // Packs the batch timestamp and block timestamp and returns the 32-byte hex string // which should be used for the "key" field of the L2->L1 system context log. 
-export function packBatchTimestampAndBlockTimestamp(batchTimestamp: number, blockTimestamp: number): string { +export function packBatchTimestampAndBlockTimestamp( + batchTimestamp: BigNumberish, + blockTimestamp: BigNumberish +): string { const packedNum = BigNumber.from(batchTimestamp).shl(128).or(BigNumber.from(blockTimestamp)); return ethers.utils.hexZeroPad(ethers.utils.hexlify(packedNum), 32); } @@ -111,11 +168,7 @@ export interface CommitBlockInfo { indexRepeatedStorageChanges: BigNumberish; newStateRoot: BytesLike; numberOfLayer1Txs: BigNumberish; - l2LogsTreeRoot: BytesLike; priorityOperationsHash: BytesLike; - initialStorageChanges: BytesLike; - repeatedStorageChanges: BytesLike; - l2Logs: BytesLike; - l2ArbitraryLengthMessages: BytesLike[]; - factoryDeps: BytesLike[]; + systemLogs: BytesLike; + totalL2ToL1Pubdata: BytesLike; } diff --git a/ethereum/test/unit_tests/validator_timelock_test.spec.ts b/ethereum/test/unit_tests/validator_timelock_test.spec.ts index fe2b4dae65..938881e17b 100644 --- a/ethereum/test/unit_tests/validator_timelock_test.spec.ts +++ b/ethereum/test/unit_tests/validator_timelock_test.spec.ts @@ -23,13 +23,9 @@ describe(`ValidatorTimelock tests`, function () { indexRepeatedStorageChanges: 0, newStateRoot: ethers.constants.HashZero, numberOfLayer1Txs: 0, - l2LogsTreeRoot: ethers.constants.HashZero, priorityOperationsHash: ethers.constants.HashZero, - initialStorageChanges: `0x00000000`, - repeatedStorageChanges: `0x`, - l2Logs: [], - l2ArbitraryLengthMessages: [], - factoryDeps: [] + systemLogs: [], + totalL2ToL1Pubdata: `0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563` }; } diff --git a/ethereum/yarn.lock b/ethereum/yarn.lock index 3857bd63a1..01bcffcf14 100644 --- a/ethereum/yarn.lock +++ b/ethereum/yarn.lock @@ -8116,9 +8116,9 @@ semver@^6.3.0: integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@^7.3.4: - version "7.5.1" - resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.5.1.tgz#c90c4d631cf74720e46b21c1d37ea07edfab91ec" - integrity sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw== + version "7.5.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" + integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" @@ -8389,9 +8389,9 @@ solidity-comments-extractor@^0.0.7: integrity sha512-wciNMLg/Irp8OKGrh3S2tfvZiZ0NEyILfcRCXCD4mp7SgK/i9gzLfhY2hY7VMCQJ3kH9UB9BzNdibIVMchzyYw== solidity-coverage@^0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/solidity-coverage/-/solidity-coverage-0.8.2.tgz#bc39604ab7ce0a3fa7767b126b44191830c07813" - integrity sha512-cv2bWb7lOXPE9/SSleDO6czkFiMHgP4NXPj+iW9W7iEKLBk7Cj0AGBiNmGX3V1totl9wjPrT0gHmABZKZt65rQ== + version "0.8.3" + resolved "https://registry.yarnpkg.com/solidity-coverage/-/solidity-coverage-0.8.3.tgz#72ce51e5ca9ea1182bbf6085eb1cf526f0603b52" + integrity sha512-hbcNgj5z8zzgTlnp4F0pXiqj1v5ua8P4DH5i9cWOBtFPfUuIohLoXu5WiAixexWmpKVjyxXqupnu/mPb4IGr7Q== dependencies: "@ethersproject/abi" "^5.0.9" "@solidity-parser/parser" "^0.14.1" diff --git a/zksync/yarn.lock b/zksync/yarn.lock index 1373aa570c..4ab9b12be9 100644 --- a/zksync/yarn.lock +++ b/zksync/yarn.lock @@ -374,10 +374,10 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@matterlabs/hardhat-zksync-solc@^0.3.14-beta.3": - version "0.3.14" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.14.tgz#0a32f01b4cd8631ecd8dfe0547e3ac49ab8290d5" - integrity sha512-iKuQ+vvnpv3K2lkFO41xpJcNWH0KHJ/5JbOboTlPZATVR7F3GJeHfJL+GG4wkxKXnxZczpxyQqC4rAfMKvRaDg== +"@matterlabs/hardhat-zksync-solc@^0.3.15": + version "0.3.17" + resolved 
"https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" + integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== dependencies: "@nomiclabs/hardhat-docker" "^2.0.0" chalk "4.1.2" From 1a6d864fec060389614a320228dd312196fc9c94 Mon Sep 17 00:00:00 2001 From: Stanislav Breadless Date: Mon, 18 Sep 2023 21:53:35 +0200 Subject: [PATCH 05/11] include full input --- ethereum/contracts/zksync/facets/Executor.sol | 11 +++++++---- ethereum/contracts/zksync/interfaces/IExecutor.sol | 4 ++++ 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/ethereum/contracts/zksync/facets/Executor.sol b/ethereum/contracts/zksync/facets/Executor.sol index 1a06f353ff..d3e04330a6 100644 --- a/ethereum/contracts/zksync/facets/Executor.sol +++ b/ethereum/contracts/zksync/facets/Executor.sol @@ -383,9 +383,7 @@ contract ExecutorFacet is Base, IExecutor { _prevBlockCommitment, _currentBlockCommitment, _verifierParams.recursionNodeLevelVkHash, - _verifierParams.recursionLeafLevelVkHash, - _verifierParams.recursionCircuitsSetVksHash, - _proof.recursiveAggregationInput + _verifierParams.recursionLeafLevelVkHash ) ) ) & INPUT_MASK; @@ -454,7 +452,12 @@ contract ExecutorFacet is Base, IExecutor { bytes32 l2ToL1LogsHash = keccak256(_block.systemLogs); - return abi.encode(l2ToL1LogsHash, _stateDiffHash); + return abi.encode( + l2ToL1LogsHash, + _stateDiffHash, + _block.bootloaderHeapInitialContentsHash, + _block.eventsQueueStateHash + ); } /// @notice Returns the keccak hash of the ABI-encoded StoredBlockInfo diff --git a/ethereum/contracts/zksync/interfaces/IExecutor.sol b/ethereum/contracts/zksync/interfaces/IExecutor.sol index 7c6821d316..19391a51a1 100644 --- a/ethereum/contracts/zksync/interfaces/IExecutor.sol +++ b/ethereum/contracts/zksync/interfaces/IExecutor.sol @@ -53,6 +53,8 @@ interface IExecutor is IBase { /// @param newStateRoot The state root of the 
full state tree /// @param numberOfLayer1Txs Number of priority operations to be processed /// @param priorityOperationsHash Hash of all priority operations from this block + /// @param bootloaderHeapInitialContentsHash Hash of the initial contents of the bootloader heap. In practice it serves as the commitment to the transactions in the batch. + /// @param eventsQueueStateHash Hash of the events queue state. In practice it serves as the commitment to the events in the batch. /// @param systemLogs concatenation of all L2 -> L1 system logs in the block /// @param totalL2ToL1Pubdata Total pubdata committed to as part of bootloader run. Contents are: l2Tol1Logs <> l2Tol1Messages <> publishedBytecodes <> stateDiffs struct CommitBlockInfo { @@ -62,6 +64,8 @@ interface IExecutor is IBase { bytes32 newStateRoot; uint256 numberOfLayer1Txs; bytes32 priorityOperationsHash; + bytes32 bootloaderHeapInitialContentsHash; + bytes32 eventsQueueStateHash; bytes systemLogs; bytes totalL2ToL1Pubdata; } From d1df4ecae526bad5fc11168395e9fb9020a803e0 Mon Sep 17 00:00:00 2001 From: Stanislav Breadless Date: Tue, 19 Sep 2023 10:34:46 +0200 Subject: [PATCH 06/11] fmt --- ethereum/contracts/zksync/facets/Executor.sol | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/ethereum/contracts/zksync/facets/Executor.sol b/ethereum/contracts/zksync/facets/Executor.sol index d3e04330a6..e9f7a8156a 100644 --- a/ethereum/contracts/zksync/facets/Executor.sol +++ b/ethereum/contracts/zksync/facets/Executor.sol @@ -452,12 +452,13 @@ contract ExecutorFacet is Base, IExecutor { bytes32 l2ToL1LogsHash = keccak256(_block.systemLogs); - return abi.encode( - l2ToL1LogsHash, - _stateDiffHash, - _block.bootloaderHeapInitialContentsHash, - _block.eventsQueueStateHash - ); + return + abi.encode( + l2ToL1LogsHash, + _stateDiffHash, + _block.bootloaderHeapInitialContentsHash, + _block.eventsQueueStateHash + ); } /// @notice Returns the keccak hash of the ABI-encoded StoredBlockInfo From 
0f706ff9742a63088af266ab95ea54a36c37fd42 Mon Sep 17 00:00:00 2001 From: Stanislav Breadless Date: Tue, 19 Sep 2023 11:22:52 +0200 Subject: [PATCH 07/11] fix unit tests --- ethereum/test/unit_tests/executor_test.spec.ts | 6 ++++++ ethereum/test/unit_tests/l2-upgrade.test.spec.ts | 4 ++++ ethereum/test/unit_tests/utils.ts | 2 ++ ethereum/test/unit_tests/validator_timelock_test.spec.ts | 2 ++ 4 files changed, 14 insertions(+) diff --git a/ethereum/test/unit_tests/executor_test.spec.ts b/ethereum/test/unit_tests/executor_test.spec.ts index 7814cbe05c..aac399ab04 100644 --- a/ethereum/test/unit_tests/executor_test.spec.ts +++ b/ethereum/test/unit_tests/executor_test.spec.ts @@ -137,6 +137,8 @@ describe(`Executor tests`, function () { newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, priorityOperationsHash: ethers.utils.randomBytes(32), + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), systemLogs: `0x`, totalL2ToL1Pubdata: `0x` }; @@ -174,6 +176,8 @@ describe(`Executor tests`, function () { newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), systemLogs: logs, totalL2ToL1Pubdata: ethers.constants.HashZero }; @@ -461,6 +465,8 @@ describe(`Executor tests`, function () { newStateRoot: ethers.utils.randomBytes(32), numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), systemLogs: logs, totalL2ToL1Pubdata: ethers.constants.HashZero }; diff --git a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts index b6cd631b1f..bbb82bb7ec 100644 --- a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts +++ 
b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts @@ -788,6 +788,8 @@ async function buildCommitBlockInfo( priorityOperationsHash: EMPTY_STRING_KECCAK, systemLogs: ethers.utils.hexConcat([`0x00000007`].concat(systemLogs)), totalL2ToL1Pubdata: ethers.constants.HashZero, + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), ...info }; } @@ -815,6 +817,8 @@ async function buildCommitBlockInfoWithCustomLogs( priorityOperationsHash: EMPTY_STRING_KECCAK, systemLogs: ethers.utils.hexConcat([size].concat(systemLogs)), totalL2ToL1Pubdata: ethers.constants.HashZero, + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), ...info }; } diff --git a/ethereum/test/unit_tests/utils.ts b/ethereum/test/unit_tests/utils.ts index c9d4e308c2..bf3733def3 100644 --- a/ethereum/test/unit_tests/utils.ts +++ b/ethereum/test/unit_tests/utils.ts @@ -169,6 +169,8 @@ export interface CommitBlockInfo { newStateRoot: BytesLike; numberOfLayer1Txs: BigNumberish; priorityOperationsHash: BytesLike; + bootloaderHeapInitialContentsHash: BytesLike; + eventsQueueStateHash: BytesLike; systemLogs: BytesLike; totalL2ToL1Pubdata: BytesLike; } diff --git a/ethereum/test/unit_tests/validator_timelock_test.spec.ts b/ethereum/test/unit_tests/validator_timelock_test.spec.ts index 938881e17b..06b71c4a28 100644 --- a/ethereum/test/unit_tests/validator_timelock_test.spec.ts +++ b/ethereum/test/unit_tests/validator_timelock_test.spec.ts @@ -24,6 +24,8 @@ describe(`ValidatorTimelock tests`, function () { newStateRoot: ethers.constants.HashZero, numberOfLayer1Txs: 0, priorityOperationsHash: ethers.constants.HashZero, + bootloaderHeapInitialContentsHash: ethers.utils.randomBytes(32), + eventsQueueStateHash: ethers.utils.randomBytes(32), systemLogs: [], totalL2ToL1Pubdata: `0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563` }; From f76e807720dd02f3f3a476f265ffd203d45851c4 
Mon Sep 17 00:00:00 2001 From: Lyova Potyomkin Date: Wed, 20 Sep 2023 13:26:33 +0100 Subject: [PATCH 08/11] Regenerate verifier with new VKs (#30) --- ethereum/contracts/zksync/Verifier.sol | 90 +++---- tools/data/scheduler_key.json | 326 ++++++++++++------------- 2 files changed, 210 insertions(+), 206 deletions(-) diff --git a/ethereum/contracts/zksync/Verifier.sol b/ethereum/contracts/zksync/Verifier.sol index 32549d012b..3dd2f616ad 100644 --- a/ethereum/contracts/zksync/Verifier.sol +++ b/ethereum/contracts/zksync/Verifier.sol @@ -234,10 +234,11 @@ contract Verifier is IVerifier { /// @dev flip of 0xe000000000000000000000000000000000000000000000000000000000000000; uint256 internal constant FR_MASK = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff; + // non residues - uint256 internal constant NON_RESIDUES_0 = 0x5; - uint256 internal constant NON_RESIDUES_1 = 0x7; - uint256 internal constant NON_RESIDUES_2 = 0xa; + uint256 internal constant NON_RESIDUES_0 = 0x05; + uint256 internal constant NON_RESIDUES_1 = 0x07; + uint256 internal constant NON_RESIDUES_2 = 0x0a; // g2 elements uint256 internal constant G2_ELEMENTS_0_X1 = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2; @@ -249,6 +250,7 @@ contract Verifier is IVerifier { uint256 internal constant G2_ELEMENTS_1_Y1 = 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4; uint256 internal constant G2_ELEMENTS_1_Y2 = 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55; + /// @notice Calculates a keccak256 hash of the runtime loaded verification keys. /// @return vkHash The keccak256 hash of the loaded verification keys. 
function verificationKeyHash() external pure returns (bytes32 vkHash) { @@ -276,57 +278,59 @@ contract Verifier is IVerifier { /// [table_type] - lookup table type commitment function _loadVerificationKey() internal pure virtual { assembly { + // gate setup commitments - mstore(VK_GATE_SETUP_0_X_SLOT, 0x14c289d746e37aa82ec428491881c4732766492a8bc2e8e3cca2000a40c0ea27) - mstore(VK_GATE_SETUP_0_Y_SLOT, 0x2f617a7eb9808ad9843d1e080b7cfbf99d61bb1b02076c905f31adb12731bc41) - mstore(VK_GATE_SETUP_1_X_SLOT, 0x210b5cc8e6a85d63b65b701b8fb5ad24ff9c41f923432de17fe4ebae04526a8c) - mstore(VK_GATE_SETUP_1_Y_SLOT, 0x05c10ab17ea731b2b87fb890fa5b10bd3d6832917a616b807a9b640888ebc731) - mstore(VK_GATE_SETUP_2_X_SLOT, 0x29d4d14adcfe67a2ac690d6369db6b75e82d8ab3124bc4fa1dd145f41ca6949c) - mstore(VK_GATE_SETUP_2_Y_SLOT, 0x004f6cd229373f1c1f735ccf49aef6a5c32025bc36c3328596dd0db7d87bef67) - mstore(VK_GATE_SETUP_3_X_SLOT, 0x06d15382e8cabae9f98374a9fbdadd424f48e24da7e4c65bf710fd7d7d59a05a) - mstore(VK_GATE_SETUP_3_Y_SLOT, 0x22e438ad5c51673879ce17073a3d2d29327a97dc3ce61c4f88540e00087695f6) - mstore(VK_GATE_SETUP_4_X_SLOT, 0x274a668dfc485cf192d0086f214146d9e02b3040a5a586df344c53c16a87882b) - mstore(VK_GATE_SETUP_4_Y_SLOT, 0x15f5bb7ad01f162b70fc77c8ea456d67d15a6ce98acbbfd521222810f8ec0a66) - mstore(VK_GATE_SETUP_5_X_SLOT, 0x0ba53bf4fb0446927857e33978d02abf45948fc68f4091394ae0827a22cf1e47) - mstore(VK_GATE_SETUP_5_Y_SLOT, 0x0720d818751ce5b3f11c716e925f60df4679ea90bed516499bdec066f5ff108f) - mstore(VK_GATE_SETUP_6_X_SLOT, 0x2e986ba2ea495e5ec6af532980b1dc567f1430bfa82f8de07c12fc097c0e0483) - mstore(VK_GATE_SETUP_6_Y_SLOT, 0x1555d189f6164e82d78de1b8313c2e923e616b3c8ed0e350c3b61c94516d0b58) - mstore(VK_GATE_SETUP_7_X_SLOT, 0x0925959592604ca73c917f9b2e029aa2563c318ddcc5ca29c11badb7b880127b) - mstore(VK_GATE_SETUP_7_Y_SLOT, 0x2b4a430fcb2fa7d6d67d6c358e01cf0524c7df7e1e56442f65b39bc1a1052367) + mstore(VK_GATE_SETUP_0_X_SLOT, 0x13598f50f1a62920416dd9f16d46032cbe810bb384da5b0ff42bdf6ab1e69662) + 
mstore(VK_GATE_SETUP_0_Y_SLOT, 0x19df99dfa60feb4bd15ddb271efac0179104c5b7a27078115a91079ca2826d51) + mstore(VK_GATE_SETUP_1_X_SLOT, 0x2cf47b2a9e17ea94be18ecb13eec2e5706880527ed0d88fd5969ac069a8aac06) + mstore(VK_GATE_SETUP_1_Y_SLOT, 0x2e3294b57b45a03075ee2e89c9e7d2ab5f261b5e2408578112b6aa7bf12a2e5f) + mstore(VK_GATE_SETUP_2_X_SLOT, 0x0377df0615f3f859583ca885861a6c0f885778f0633d082b9883afcbcb186c6c) + mstore(VK_GATE_SETUP_2_Y_SLOT, 0x0cde99a920bf69e9ff1d42b719e2895f0da6d6a334f2a2a4e135cc93bf515a4a) + mstore(VK_GATE_SETUP_3_X_SLOT, 0x042222cc79ad515fdae98dddc09d8e78fc8af37a69baf36a04188ca54334c443) + mstore(VK_GATE_SETUP_3_Y_SLOT, 0x0f0158e345c08da2021b703aac1b697d2f3553617fef778bf2c5fafc40324be1) + mstore(VK_GATE_SETUP_4_X_SLOT, 0x0102fab7a43b3ac26db87f53bd303cf44d6f7c963046681422c32dea20a526df) + mstore(VK_GATE_SETUP_4_Y_SLOT, 0x221a29c1df2e5822d060261802073700a285082ebb1263e53814b017a8bfb14b) + mstore(VK_GATE_SETUP_5_X_SLOT, 0x269fbe8da379085e789a60720c12324ee4efc0f9215bfc30a65134ecbe875682) + mstore(VK_GATE_SETUP_5_Y_SLOT, 0x0134b14e94b7a719d213a0e7136547add67e71723ed82c87e8a0f8926fb002ac) + mstore(VK_GATE_SETUP_6_X_SLOT, 0x07d50d604f391c8f7331ce2df7115aa8a6db6bcc1ad246255b12da2e80f7d815) + mstore(VK_GATE_SETUP_6_Y_SLOT, 0x16f31ebefd077f85029a1af6136a2d63e8e92015ea44bac6386cb05fc7aca56e) + mstore(VK_GATE_SETUP_7_X_SLOT, 0x1ceeda30b083c5b748a1dbbefc4b8324e35cb9830c630b8064ece3a7c30e5f4a) + mstore(VK_GATE_SETUP_7_Y_SLOT, 0x0037029b76eb626e15fefc39fb9c142499e9ee283063b2811afff7211b9a0b63) // gate selectors commitments - mstore(VK_GATE_SELECTORS_0_X_SLOT, 0x28f2a0a95af79ba67e9dd1986bd3190199f661b710a693fc82fb395c126edcbd) - mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x0db75db5de5192d1ba1c24710fc00da16fa8029ac7fe82d855674dcd6d090e05) - mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x143471a174dfcb2d9cb5ae621e519387bcc93c9dcfc011160b2f5c5f88e32cbe) - mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x2a0194c0224c3d964223a96c4c99e015719bc879125aa0df3f0715d154e71a31) + mstore(VK_GATE_SELECTORS_0_X_SLOT, 
0x1885051503e3667eac61262e28b7cedd16a619d19e8820a522a269dc98262c69) + mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x0daf7bffe99c0357aefcd6ccaf363ce5096e48d105fc8583e649589cc149183d) + mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x215a735844e0035f245a45154f19635bdc70d8f48eef46384c2c948002af0c81) + mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x1faecf6813ffbc0a2eeeb50ca035d4542ca557f15e2b82ffffb24024cf3df2d1) // permutation commitments - mstore(VK_PERMUTATION_0_X_SLOT, 0x1423fa82e00ba22c280181afb12c56eea541933eeb5ec39119b0365b6beab4b9) - mstore(VK_PERMUTATION_0_Y_SLOT, 0x0efdcd3423a38f5e2ecf8c7e4fd46f13189f8fed392ad9d8d393e8ba568b06e4) - mstore(VK_PERMUTATION_1_X_SLOT, 0x0e9b5b12c1090d62224e64aa1696c009aa59a9c3eec458e781fae773e1f4eca5) - mstore(VK_PERMUTATION_1_Y_SLOT, 0x1fe3df508c7e9750eb37d9cae5e7437ad11a21fa36530ff821b407b165a79a55) - mstore(VK_PERMUTATION_2_X_SLOT, 0x25d1a714bd1e258f196e38d6b2826153382c2d04b870d0b7ec250296005129ae) - mstore(VK_PERMUTATION_2_Y_SLOT, 0x0883a121b41ca7beaa9de97ecf4417e62aa2eeb9434f24ddacbfed57cbf016a8) - mstore(VK_PERMUTATION_3_X_SLOT, 0x2f3ede68e854a6b3b14589851cf077a606e2aeb3205c43cc579b7abae39d8f58) - mstore(VK_PERMUTATION_3_Y_SLOT, 0x178ccd4b1f78fd79ee248e376b6fc8297d5450900d1e15e8c03e3ed2c171ac8c) + mstore(VK_PERMUTATION_0_X_SLOT, 0x2f24b1ffdea08385d0ca89b32db8601b3b95b255903edceac52402dfece9eee4) + mstore(VK_PERMUTATION_0_Y_SLOT, 0x1e90e41ecb35e8455c55bf4cfd41e80bb4e5dd845b65c6f77e2ac0821b09a883) + mstore(VK_PERMUTATION_1_X_SLOT, 0x128d006b473b50bc85a1a2707c278e08186e4402010178bf930b7ea24e203ed0) + mstore(VK_PERMUTATION_1_Y_SLOT, 0x0fa100f35e66d3c2429f1c20be00fe21b79a65e9de665ba420ee388ee0b47b3c) + mstore(VK_PERMUTATION_2_X_SLOT, 0x3026de72fd5e47f96c7d0f4f0c15cfd5f50438991fdbb18d4c5d18561b0e87cd) + mstore(VK_PERMUTATION_2_Y_SLOT, 0x20162d33d551a2599be33cfba40358bdab4338891cfe7e028355e5a50442b5a4) + mstore(VK_PERMUTATION_3_X_SLOT, 0x0a811f316900d60021c0ef765bcfc2070cc3a8b38b09a3b4be04e37e292ebaf1) + mstore(VK_PERMUTATION_3_Y_SLOT, 
0x192c167388faa4266ced24b8e3687e81974aa8caaf1ac3e8a3fab3947c7b65d1) // lookup tables commitments - mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x0ebe0de4a2f39df3b903da484c1641ffdffb77ff87ce4f9508c548659eb22d3c) - mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x12a3209440242d5662729558f1017ed9dcc08fe49a99554dd45f5f15da5e4e0b) - mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1b7d54f8065ca63bed0bfbb9280a1011b886d07e0c0a26a66ecc96af68c53bf9) - mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x2c51121fff5b8f58c302f03c74e0cb176ae5a1d1730dec4696eb9cce3fe284ca) - mstore(VK_LOOKUP_TABLE_2_X_SLOT, 0x0138733c5faa9db6d4b8df9748081e38405999e511fb22d40f77cf3aef293c44) - mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x269bee1c1ac28053238f7fe789f1ea2e481742d6d16ae78ed81e87c254af0765) - mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x1b1be7279d59445065a95f01f16686adfa798ec4f1e6845ffcec9b837e88372e) - mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 0x057c90cb96d8259238ed86b05f629efd55f472a721efeeb56926e979433e6c0e) + mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x2c513ed74d9d57a5ec901e074032741036353a2c4513422e96e7b53b302d765b) + mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x04dd964427e430f16004076d708c0cb21e225056cc1d57418cfbd3d472981468) + mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1ea83e5e65c6f8068f4677e2911678cf329b28259642a32db1f14b8347828aac) + mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x1d22bc884a2da4962a893ba8de13f57aaeb785ed52c5e686994839cab8f7475d) + mstore(VK_LOOKUP_TABLE_2_X_SLOT, 0x0b2e7212d0d9cff26d0bdf3d79b2cac029a25dfeb1cafdf49e2349d7db348d89) + mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x1301f9b252419ea240eb67fda720ca0b16d92364027285f95e9b1349490fa283) + mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x02f7b99fdfa5b418548c2d777785820e02383cfc87e7085e280a375a358153bf) + mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 0x09d004fe08dc4d19c382df36fad22ef676185663543703e6a4b40203e50fd8a6) // lookup selector commitment - mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x1f814e2d87c332e964eeef94ec695eef9d2caaac58b682a43da5107693b06f30) - mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x196d56fb01907e66af9303886fd95328d398e5b2b72906882a9d12c1718e2ee2) + 
mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x300223627d9e977c0b02a64b49cb1c6c7833b82e25bacb420ef6f48b6deeed06) + mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x2d0bd4b927912e6509088fb7167a30b85bd5d031e75b057b687e62fc6d779f91) // table type commitment - mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x2f85df2d6249ccbcc11b91727333cc800459de6ee274f29c657c8d56f6f01563) - mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x088e1df178c47116a69c3c8f6d0c5feb530e2a72493694a623b1cceb7d44a76c) + mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x006f5f6969088413e15abe92418dd7668c6c5fa2dcca168e71a98d84a9fe39de) + mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x18550c804fadc55861b6a34d5341d594486833e62bd6137089f3335566ca40ee) + } } diff --git a/tools/data/scheduler_key.json b/tools/data/scheduler_key.json index c3262193a4..460fdfab3a 100644 --- a/tools/data/scheduler_key.json +++ b/tools/data/scheduler_key.json @@ -1,126 +1,126 @@ { - "n": 67108863, + "n": 16777215, "num_inputs": 1, "state_width": 4, "num_witness_polys": 0, "gate_setup_commitments": [ { "x": [ - 14745348174000482855, - 2839037062185937123, - 3369862715588854899, - 1495909583940713128 + 17594402018597181026, + 13727266004857936655, + 4714664015870427948, + 1394303137462233376 ], "y": [ - 6859454683840363585, - 11340551061368171664, - 9528805406487149561, - 3414144677220223705 + 6526005704359308625, + 10449694427901360145, + 15086455287830986775, + 1864377956597492555 ], "infinity": false }, { "x": [ - 9215749870136224396, - 18418669114332753377, - 13140219601461030180, - 2381098845928447331 + 6442869886292765702, + 470631830099167485, + 13697958512656395863, + 3239349454957439636 ], "y": [ - 8834765081837029169, - 4424842234296363904, - 13294547557836067005, - 414624398145171890 + 1348452587747815007, + 6856197573863561089, + 8497780716227515051, + 3328886581758369840 ], "infinity": false }, { "x": [ - 2148575411987453084, - 16730180692461995258, - 12423475767707134837, - 3014264170083149730 + 10989820805534739564, + 9824454085984454699, + 6358142065374293007, + 
249913521573460057 ], "y": [ - 10870860158804422503, - 14060279526953529989, - 2266257082861680293, - 22356173050560284 + 16228101767241488970, + 983709565083361956, + 18382922608176957791, + 927347524962839017 ], "infinity": false }, { "x": [ - 17803008042411335770, - 5713064950476621403, - 17979342410816871746, - 491265656076548841 + 295140417017791555, + 18197624951429329770, + 15774295153400712824, + 297838786969948511 ], "y": [ - 9823492080506672630, - 3637386621225409615, - 8776978043600973097, - 2514196809208915768 + 17493664288906300385, + 3401716771771611019, + 151838409744935293, + 1081243118696697250 ], "infinity": false }, { "x": [ - 3768479078383323179, - 16153057542709544671, - 10578964798085613273, - 2831188075764800753 + 2504896301341419231, + 5579815447832193044, + 7906209143481974004, + 72896210632653506 ], "y": [ - 2387514805820590694, - 15085489652142686165, - 8141513931186597223, - 1582376980242699819 + 4041048381320048971, + 11710775402846905317, + 15015043042208331520, + 2457322459333810210 ], "infinity": false }, { "x": [ - 5395455814671474247, - 5013790368139874617, - 8671649443504728767, - 839142828943885970 + 11984418274823067266, + 16496616136320482352, + 8690364473909850702, + 2783152610280540254 ], "y": [ - 11231626069154926735, - 5078347962234771017, - 17373886182204596447, - 513647957075879347 + 16762671120895705772, + 15455915706676685959, + 15137619686845663149, + 86889243887511321 ], "infinity": false }, { "x": [ - 8940485327950054531, - 9156997542069636576, - 14316753178152000598, - 3357551869664255582 + 6562547500299311125, + 12023322154563159589, + 8300642285034494632, + 564372035602029711 ], "y": [ - 14102490706504125272, - 4494991810930729808, - 15532318871086968466, - 1537365238286274178 + 4065818489027077486, + 16782980764869638854, + 187491978679299427, + 1653699293812850565 ], "infinity": false }, { "x": [ - 13914906478277300859, - 6213896071228541481, - 4364409818367302306, - 659097390118096039 + 7272437807968051018, + 
16383173516966693760, + 5233705855304303396, + 2084843580173829559 ], "y": [ - 7328372274594390887, - 2650332638498669615, - 15455628473476960005, - 3119379427019958230 + 1945545285616405347, + 11090657413754499713, + 1584981444845638692, + 15483990457410158 ], "infinity": false } @@ -128,31 +128,31 @@ "gate_selectors_commitments": [ { "x": [ - 9438200511694036157, - 11094162170960057340, - 9123678872696723713, - 2950597355117190054 + 2495673539683036265, + 1632020303066964133, + 12421251226958024413, + 1766824016630474366 ], "y": [ - 6153972960518016517, - 8045683598100955864, - 13410633858416643489, - 988361678931464913 + 16593891732304238653, + 679560656365323651, + 12609189231331785957, + 986143182483620695 ], "infinity": false }, { "x": [ - 805964423710846142, - 13603470797942296854, - 11292123377140077447, - 1455913517812009773 + 5488925323379543169, + 15884434430617404984, + 2619482091093058395, + 2403360174105428831 ], "y": [ - 4541622738043214385, - 8186357170000535775, - 4765839113294831637, - 3026863977499737494 + 18424859552366064337, + 3217074203004732159, + 3381839436033938516, + 2282990107042495498 ], "infinity": false } @@ -160,155 +160,155 @@ "permutation_commitments": [ { "x": [ - 1851039213129741497, - 11907960788190413713, - 2882727828085561070, - 1451278944954982956 + 14205482285409103588, + 4293533900329639146, + 15044988907843772443, + 3397035731454034821 ], "y": [ - 15245785050592773860, - 1774295027236395480, - 3373069120056880915, - 1080245109458702174 + 9091290462832666755, + 13035068257032586999, + 6653434361897150475, + 2202511038693632069 ], "infinity": false }, { "x": [ - 9366052859968548005, - 12275028918364559591, - 2472023927159177225, - 1052535074027277666 + 10595701783879761616, + 1760419279745939647, + 9629156082297835016, + 1336725125155475644 ], "y": [ - 2428574557555628629, - 15067392861858369528, - 16949255188095910778, - 2297925771936569168 + 2372896239977265980, + 13229998910487288740, + 4800586654752505377, + 
1126182427080184770 ], "infinity": false }, { "x": [ - 17016009610362956206, - 4047659663396753591, - 1832464593155416403, - 2725142957049914767 + 5502581077816805325, + 17655298669514830221, + 7817421360364638165, + 3469705148393998329 ], "y": [ - 12447928856414787240, - 3072280375285720285, - 12294239288643819494, - 613511140380288958 + 9463722688892810660, + 12340769565473406466, + 11232888947078289597, + 2312085159346807385 ], "infinity": false }, { "x": [ - 6312774245791141720, - 496150993329472460, - 12773767122915456934, - 3404402910494500531 + 13692318898109463281, + 919764237998990260, + 2432207090405589511, + 756920509450475008 ], "y": [ - 13852578578747731084, - 9030931732410275304, - 17159996848865265705, - 1696956882146098553 + 11815954022684911057, + 10901711436478333928, + 7848970102084501121, + 1813849435398644774 ], "infinity": false } ], - "total_lookup_entries_length": 1073530, + "total_lookup_entries_length": 1786644, "lookup_selector_commitment": { "x": [ - 4441974708940861232, - 11325614820129407652, - 7273013871150456559, - 2270181644629652201 + 1078318040483818758, + 8661469016704600898, + 793379326661434476, + 3459366369695274876 ], "y": [ - 3070631142979677922, - 15247189094202672776, - 12651459662740804392, - 1832216259472686694 + 7529564463317884817, + 6617424140244288891, + 650928162673930424, + 3245921848148438629 ], "infinity": false }, "lookup_tables_commitments": [ { "x": [ - 631990924006796604, - 16139625628991115157, - 13331739325995827711, - 1062301837743594995 + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 ], "y": [ - 15303054606290800139, - 15906872095881647437, - 7093896572295020249, - 1342952934989901142 + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 ], "infinity": false }, { "x": [ - 7983921919542246393, - 13296544189644416678, - 17081022784392007697, - 1980832835348244027 + 12822112641313049260, + 3646552465186399021, + 
10324071010773924047, + 2209084192380614662 ], "y": [ - 10874958134865200330, - 7702740658637630534, - 14052057929798961943, - 3193353539419869016 + 11045141628975531869, + 12589678537679955590, + 3065046617868727674, + 2099447669854151830 ], "infinity": false }, { "x": [ - 1114587284824996932, - 4636906500482867924, - 15328247172597030456, - 87946895873973686 + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 ], "y": [ - 15573033830207915877, - 5194694185599035278, - 2562407345425607214, - 2782078999306862675 + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 ], "infinity": false }, { "x": [ - 18225112781127431982, - 18048613958187123807, - 7325490730844456621, - 1953409020724855888 + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 ], "y": [ - 7577000130125917198, - 6193701449695751861, - 4102082927677054717, - 395350071385269650 + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 ], "infinity": false } ], "lookup_table_type_commitment": { "x": [ - 7312875299592476003, - 313526216906044060, - 13914875394436353152, - 3424388477700656316 + 8190232998277167582, + 10118567615888234126, + 16238500941812062054, + 31348628753253395 ], "y": [ - 2572062173996296044, - 5984767625164919974, - 12005537293370417131, - 616463121946800406 + 9940345244426584302, + 5217477231979664240, + 7040994619944981908, + 1753321375162811736 ], "infinity": false }, @@ -396,4 +396,4 @@ "infinity": false } ] -} \ No newline at end of file +} From 82d18238018b5c25cb07367b6caf1bdd496fb671 Mon Sep 17 00:00:00 2001 From: Stanislav Breadless Date: Wed, 20 Sep 2023 14:28:30 +0200 Subject: [PATCH 09/11] fix formatting --- ethereum/contracts/zksync/Verifier.sol | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ethereum/contracts/zksync/Verifier.sol b/ethereum/contracts/zksync/Verifier.sol index 3dd2f616ad..a5e8d05173 100644 --- 
a/ethereum/contracts/zksync/Verifier.sol +++ b/ethereum/contracts/zksync/Verifier.sol @@ -234,7 +234,6 @@ contract Verifier is IVerifier { /// @dev flip of 0xe000000000000000000000000000000000000000000000000000000000000000; uint256 internal constant FR_MASK = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff; - // non residues uint256 internal constant NON_RESIDUES_0 = 0x05; uint256 internal constant NON_RESIDUES_1 = 0x07; @@ -250,7 +249,6 @@ contract Verifier is IVerifier { uint256 internal constant G2_ELEMENTS_1_Y1 = 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4; uint256 internal constant G2_ELEMENTS_1_Y2 = 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55; - /// @notice Calculates a keccak256 hash of the runtime loaded verification keys. /// @return vkHash The keccak256 hash of the loaded verification keys. function verificationKeyHash() external pure returns (bytes32 vkHash) { @@ -278,7 +276,6 @@ contract Verifier is IVerifier { /// [table_type] - lookup table type commitment function _loadVerificationKey() internal pure virtual { assembly { - // gate setup commitments mstore(VK_GATE_SETUP_0_X_SLOT, 0x13598f50f1a62920416dd9f16d46032cbe810bb384da5b0ff42bdf6ab1e69662) mstore(VK_GATE_SETUP_0_Y_SLOT, 0x19df99dfa60feb4bd15ddb271efac0179104c5b7a27078115a91079ca2826d51) @@ -330,7 +327,6 @@ contract Verifier is IVerifier { // table type commitment mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x006f5f6969088413e15abe92418dd7668c6c5fa2dcca168e71a98d84a9fe39de) mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x18550c804fadc55861b6a34d5341d594486833e62bd6137089f3335566ca40ee) - } } From 9bd2411a3f4bffe7b8d5fe0459a94ecb27cbfd69 Mon Sep 17 00:00:00 2001 From: Stanislav Bezkorovainyi Date: Wed, 27 Sep 2023 11:46:28 +0200 Subject: [PATCH 10/11] Rename block -> batch where appropriate (#33) Co-authored-by: Vlad Bochok <41153528+vladbochok@users.noreply.github.com> --- SystemConfig.json | 8 +- docs/Overview.md | 25 +- 
ethereum/contracts/bridge/L1ERC20Bridge.sol | 30 +- ethereum/contracts/bridge/L1WethBridge.sol | 26 +- .../contracts/bridge/interfaces/IL1Bridge.sol | 10 +- .../dev-contracts/test/DummyExecutor.sol | 104 +++--- .../contracts/upgrades/BaseZkSyncUpgrade.sol | 16 +- ethereum/contracts/zksync/Config.sol | 16 +- ethereum/contracts/zksync/DiamondInit.sol | 20 +- ethereum/contracts/zksync/Storage.sol | 34 +- .../contracts/zksync/ValidatorTimelock.sol | 63 ++-- ethereum/contracts/zksync/facets/Executor.sol | 331 ++++++++--------- ethereum/contracts/zksync/facets/Getters.sol | 93 +++-- ethereum/contracts/zksync/facets/Mailbox.sol | 64 ++-- .../contracts/zksync/interfaces/IExecutor.sol | 66 ++-- .../contracts/zksync/interfaces/IGetters.sol | 14 +- .../zksync/interfaces/ILegacyGetters.sol | 21 ++ .../contracts/zksync/interfaces/IMailbox.sol | 16 +- .../zksync/libraries/TransactionValidator.sol | 32 +- ethereum/scripts/utils.ts | 2 +- ethereum/src.ts/deploy.ts | 12 +- .../test/unit_tests/executor_test.spec.ts | 334 +++++++++--------- .../unit_tests/l1_erc20_bridge_test.spec.ts | 2 +- .../test/unit_tests/l2-upgrade.test.spec.ts | 196 +++++----- ethereum/test/unit_tests/utils.ts | 30 +- .../validator_timelock_test.spec.ts | 144 ++++---- .../contracts/bridge/interfaces/IL1Bridge.sol | 4 +- 27 files changed, 897 insertions(+), 816 deletions(-) create mode 100644 ethereum/contracts/zksync/interfaces/ILegacyGetters.sol diff --git a/SystemConfig.json b/SystemConfig.json index c87bbc233f..fac1bb21b4 100644 --- a/SystemConfig.json +++ b/SystemConfig.json @@ -1,12 +1,12 @@ { "L2_TX_MAX_GAS_LIMIT": 80000000, - "MAX_PUBDATA_PER_BLOCK": 110000, + "MAX_PUBDATA_PER_BATCH": 110000, "PRIORITY_TX_MAX_PUBDATA": 99000, "FAIR_L2_GAS_PRICE": 500000000, "L1_GAS_PER_PUBDATA_BYTE": 17, - "BLOCK_OVERHEAD_L2_GAS": 1200000, - "BLOCK_OVERHEAD_L1_GAS": 1000000, - "MAX_TRANSACTIONS_IN_BLOCK": 1024, + "BATCH_OVERHEAD_L2_GAS": 1200000, + "BATCH_OVERHEAD_L1_GAS": 1000000, + "MAX_TRANSACTIONS_IN_BATCH": 1024, 
"BOOTLOADER_TX_ENCODING_SPACE": 485225, "L1_TX_INTRINSIC_L2_GAS": 167157, "L1_TX_INTRINSIC_PUBDATA": 88, diff --git a/docs/Overview.md b/docs/Overview.md index 1a8bddfe6c..b5c0afeb3a 100644 --- a/docs/Overview.md +++ b/docs/Overview.md @@ -19,7 +19,8 @@ See the [documentation](https://era.zksync.io/docs/dev/fundamentals/rollups.html - **Governor** - a privileged address that controls the upgradability of the network and sets other privileged addresses. - **Security council** - an address of the Gnosis multisig with the trusted owners that can decrease upgrade timelock. -- **Validator/Operator** - a privileged address that can commit/verify/execute L2 blocks. +- **Validator/Operator** - a privileged address that can commit/verify/execute L2 batches. +- **L2 batch (or just batch)** - An aggregation of multiple L2 blocks. Note, that while the API operates on L2 blocks, the prove system operates on batches, which represent a single proved VM execution, which typically contains multiple L2 blocks. - **Facet** - implementation contract. The word comes from the EIP-2535. - **Gas** - a unit that measures the amount of computational effort required to execute specific operations on the zkSync Era network. @@ -143,11 +144,11 @@ burn the funds on L2, allowing the user to reclaim them through the `finalizeEth L2 -> L1 communication, in contrast to L1 -> L2 communication, is based only on transferring the information, and not on the transaction execution on L1. -From the L2 side, there is a special zkEVM opcode that saves `l2ToL1Log` in the L2 block. A validator will send all -`l2ToL1Logs` when sending an L2 block to the L1 (see `ExecutorFacet`). Later on, users will be able to both read their +From the L2 side, there is a special zkEVM opcode that saves `l2ToL1Log` in the L2 batch. A validator will send all +`l2ToL1Logs` when sending an L2 batch to the L1 (see `ExecutorFacet`). Later on, users will be able to both read their `l2ToL1logs` on L1 and _prove_ that they sent it. 
-From the L1 side, for each L2 block, a Merkle root with such logs in leaves is calculated. Thus, a user can provide +From the L1 side, for each L2 batch, a Merkle root with such logs in leaves is calculated. Thus, a user can provide Merkle proof for each `l2ToL1Logs`. _NOTE_: For each executed L1 -> L2 transaction, the system program necessarily sends an L2 -> L1 log. To verify the @@ -164,18 +165,18 @@ this trick: #### ExecutorFacet -A contract that accepts L2 blocks, enforces data availability and checks the validity of zk-proofs. +A contract that accepts L2 batches, enforces data availability and checks the validity of zk-proofs. The state transition is divided into three stages: -- `commitBlocks` - check L2 block timestamp, process the L2 logs, save data for a block, and prepare data for zk-proof. -- `proveBlocks` - validate zk-proof. -- `executeBlocks` - finalize the state, marking L1 -> L2 communication processing, and saving Merkle tree with L2 logs. +- `commitBatches` - check L2 batch timestamp, process the L2 logs, save data for a batch, and prepare data for zk-proof. +- `proveBatches` - validate zk-proof. +- `executeBatches` - finalize the state, marking L1 -> L2 communication processing, and saving Merkle tree with L2 logs. -When a block is committed, we process L2 -> L1 logs. Here are the invariants that are expected there: +When a batch is committed, we process L2 -> L1 logs. Here are the invariants that are expected there: -- The only L2 -> L1 log from the `L2_SYSTEM_CONTEXT_ADDRESS`, with the `key == l2BlockTimestamp` and - `value == l2BlockHash`. +- The only L2 -> L1 log from the `L2_SYSTEM_CONTEXT_ADDRESS`, with the `key == l2BatchTimestamp` and + `value == l2BatchHash`. - Several (or none) logs from the `L2_KNOWN_CODE_STORAGE_ADDRESS` with the `key == bytecodeHash`, where bytecode is marked as a known factory dependency. 
- Several (or none) logs from the `L2_BOOTLOADER_ADDRESS` with the `key == canonicalTxHash` where `canonicalTxHash` is a @@ -230,7 +231,7 @@ the L1 recipient. #### ValidatorTimelock An intermediate smart contract between the validator EOA account and the zkSync smart contract. Its primary purpose is -to provide a trustless means of delaying block execution without modifying the main zkSync contract. zkSync actively +to provide a trustless means of delaying batch execution without modifying the main zkSync contract. zkSync actively monitors the chain activity and reacts to any suspicious activity by freezing the chain. This allows time for investigation and mitigation before resuming normal operations. diff --git a/ethereum/contracts/bridge/L1ERC20Bridge.sol b/ethereum/contracts/bridge/L1ERC20Bridge.sol index 5eb492bd1b..b2589590c1 100644 --- a/ethereum/contracts/bridge/L1ERC20Bridge.sol +++ b/ethereum/contracts/bridge/L1ERC20Bridge.sol @@ -33,7 +33,7 @@ contract L1ERC20Bridge is IL1Bridge, IL1BridgeLegacy, AllowListed, ReentrancyGua /// @dev zkSync smart contract that is used to operate with L2 via asynchronous L2 <-> L1 communication IZkSync internal immutable zkSync; - /// @dev A mapping L2 block number => message number => flag + /// @dev A mapping L2 batch number => message number => flag /// @dev Used to indicate that zkSync L2 -> L1 message was already processed mapping(uint256 => mapping(uint256 => bool)) public isWithdrawalFinalized; @@ -244,24 +244,24 @@ contract L1ERC20Bridge is IL1Bridge, IL1BridgeLegacy, AllowListed, ReentrancyGua /// @param _depositSender The address of the deposit initiator /// @param _l1Token The address of the deposited L1 ERC20 token /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization - /// @param _l2BlockNumber The L2 block number where the deposit finalization was processed + /// @param _l2BatchNumber The L2 batch number where the deposit finalization was processed /// @param _l2MessageIndex The 
position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization function claimFailedDeposit( address _depositSender, address _l1Token, bytes32 _l2TxHash, - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes32[] calldata _merkleProof ) external nonReentrant senderCanCallFunction(allowList) { bool proofValid = zkSync.proveL1ToL2TransactionStatus( _l2TxHash, - _l2BlockNumber, + _l2BatchNumber, _l2MessageIndex, - _l2TxNumberInBlock, + _l2TxNumberInBatch, _merkleProof, TxStatus.Failure ); @@ -281,22 +281,22 @@ contract L1ERC20Bridge is IL1Bridge, IL1BridgeLegacy, AllowListed, ReentrancyGua } /// @notice Finalize the withdrawal and release funds - /// @param _l2BlockNumber The L2 block number where the withdrawal was processed + /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent /// @param _message The L2 withdraw data, stored in an L2 -> L1 message /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization function finalizeWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external nonReentrant senderCanCallFunction(allowList) { - 
require(!isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex], "pw"); + require(!isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex], "pw"); L2Message memory l2ToL1Message = L2Message({ - txNumberInBlock: _l2TxNumberInBlock, + txNumberInBatch: _l2TxNumberInBatch, sender: l2Bridge, data: _message }); @@ -304,11 +304,11 @@ contract L1ERC20Bridge is IL1Bridge, IL1BridgeLegacy, AllowListed, ReentrancyGua (address l1Receiver, address l1Token, uint256 amount) = _parseL2WithdrawalMessage(l2ToL1Message.data); // Preventing the stack too deep error { - bool success = zkSync.proveL2MessageInclusion(_l2BlockNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); + bool success = zkSync.proveL2MessageInclusion(_l2BatchNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); require(success, "nq"); } - isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex] = true; + isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex] = true; // Withdraw funds IERC20(l1Token).safeTransfer(l1Receiver, amount); diff --git a/ethereum/contracts/bridge/L1WethBridge.sol b/ethereum/contracts/bridge/L1WethBridge.sol index c6335e7832..50388a50f7 100644 --- a/ethereum/contracts/bridge/L1WethBridge.sol +++ b/ethereum/contracts/bridge/L1WethBridge.sol @@ -54,7 +54,7 @@ contract L1WethBridge is IL1Bridge, AllowListed, ReentrancyGuard { /// @dev The address of the WETH on L2 address public l2WethAddress; - /// @dev A mapping L2 block number => message number => flag + /// @dev A mapping L2 batch number => message number => flag /// @dev Used to indicate that zkSync L2 -> L1 WETH message was already processed mapping(uint256 => mapping(uint256 => bool)) public isWithdrawalFinalized; @@ -209,45 +209,45 @@ contract L1WethBridge is IL1Bridge, AllowListed, ReentrancyGuard { address, // _depositSender, address, // _l1Token, bytes32, // _l2TxHash - uint256, // _l2BlockNumber, + uint256, // _l2BatchNumber, uint256, // _l2MessageIndex, - uint16, // _l2TxNumberInBlock, + uint16, // _l2TxNumberInBatch, bytes32[] 
calldata // _merkleProof ) external pure { revert("Method not supported. Failed deposit funds are sent to the L2 refund recipient address."); } /// @notice Finalize the withdrawal and release funds - /// @param _l2BlockNumber The L2 block number where the ETH (WETH) withdrawal was processed + /// @param _l2BatchNumber The L2 batch number where the ETH (WETH) withdrawal was processed /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the ETH withdrawal message containing additional data about WETH withdrawal - /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the ETH withdrawal log was sent + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the ETH withdrawal log was sent /// @param _message The L2 withdraw data, stored in an L2 -> L1 message /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization function finalizeWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external nonReentrant senderCanCallFunction(allowList) { - require(!isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex], "Withdrawal is already finalized"); + require(!isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex], "Withdrawal is already finalized"); (address l1WethWithdrawReceiver, uint256 amount) = _parseL2EthWithdrawalMessage(_message); // Check if the withdrawal has already been finalized on L2. - bool alreadyFinalised = zkSync.isEthWithdrawalFinalized(_l2MessageIndex, _l2TxNumberInBlock); + bool alreadyFinalised = zkSync.isEthWithdrawalFinalized(_l2MessageIndex, _l2TxNumberInBatch); if (alreadyFinalised) { // Check that the specified message was actually sent while withdrawing eth from L2. 
L2Message memory l2ToL1Message = L2Message({ - txNumberInBlock: _l2TxNumberInBlock, + txNumberInBatch: _l2TxNumberInBatch, sender: L2_ETH_TOKEN_SYSTEM_CONTRACT_ADDR, data: _message }); - bool success = zkSync.proveL2MessageInclusion(_l2BlockNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); + bool success = zkSync.proveL2MessageInclusion(_l2BatchNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); require(success, "vq"); } else { // Finalize the withdrawal if it is not yet done. - zkSync.finalizeEthWithdrawal(_l2BlockNumber, _l2MessageIndex, _l2TxNumberInBlock, _message, _merkleProof); + zkSync.finalizeEthWithdrawal(_l2BatchNumber, _l2MessageIndex, _l2TxNumberInBatch, _message, _merkleProof); } // Wrap ETH to WETH tokens (smart contract address receives the equivalent amount of WETH) @@ -255,7 +255,7 @@ contract L1WethBridge is IL1Bridge, AllowListed, ReentrancyGuard { // Transfer WETH tokens from the smart contract address to the withdrawal receiver IERC20(l1WethAddress).safeTransfer(l1WethWithdrawReceiver, amount); - isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex] = true; + isWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex] = true; emit WithdrawalFinalized(l1WethWithdrawReceiver, l1WethAddress, amount); } diff --git a/ethereum/contracts/bridge/interfaces/IL1Bridge.sol b/ethereum/contracts/bridge/interfaces/IL1Bridge.sol index 6349dd635a..5abca7667a 100644 --- a/ethereum/contracts/bridge/interfaces/IL1Bridge.sol +++ b/ethereum/contracts/bridge/interfaces/IL1Bridge.sol @@ -16,7 +16,7 @@ interface IL1Bridge { event ClaimedFailedDeposit(address indexed to, address indexed l1Token, uint256 amount); - function isWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool); + function isWithdrawalFinalized(uint256 _l2BatchNumber, uint256 _l2MessageIndex) external view returns (bool); function deposit( address _l2Receiver, @@ -31,16 +31,16 @@ interface IL1Bridge { address _depositSender, address _l1Token, bytes32 
_l2TxHash, - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes32[] calldata _merkleProof ) external; function finalizeWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external; diff --git a/ethereum/contracts/dev-contracts/test/DummyExecutor.sol b/ethereum/contracts/dev-contracts/test/DummyExecutor.sol index b27e060ca8..5671901fef 100644 --- a/ethereum/contracts/dev-contracts/test/DummyExecutor.sol +++ b/ethereum/contracts/dev-contracts/test/DummyExecutor.sol @@ -9,15 +9,15 @@ import "../../zksync/interfaces/IExecutor.sol"; contract DummyExecutor is IExecutor { address owner; - // Flags to control if the contract should revert during commit, prove, and execute blocks operations - bool shouldRevertOnCommitBlocks; - bool shouldRevertOnProveBlocks; - bool shouldRevertOnExecuteBlocks; - - // Counters to track the total number of committed, verified, and executed blocks - uint256 public getTotalBlocksCommitted; - uint256 public getTotalBlocksVerified; - uint256 public getTotalBlocksExecuted; + // Flags to control if the contract should revert during commit, prove, and execute batch operations + bool shouldRevertOnCommitBatches; + bool shouldRevertOnProveBatches; + bool shouldRevertOnExecuteBatches; + + // Counters to track the total number of committed, verified, and executed batches + uint256 public getTotalBatchesCommitted; + uint256 public getTotalBatchesVerified; + uint256 public getTotalBatchesExecuted; string public constant override getName = "DummyExecutor"; /// @notice Constructor sets the contract owner to the message sender @@ -31,83 +31,83 @@ contract DummyExecutor is IExecutor { _; } - /// @notice Allows the owner to set whether the contract should revert during commit blocks operation - function 
setShouldRevertOnCommitBlocks(bool _shouldRevert) external onlyOwner { - shouldRevertOnCommitBlocks = _shouldRevert; + /// @notice Allows the owner to set whether the contract should revert during commit batches operation + function setShouldRevertOnCommitBatches(bool _shouldRevert) external onlyOwner { + shouldRevertOnCommitBatches = _shouldRevert; } - /// @notice Allows the owner to set whether the contract should revert during prove blocks operation - function setShouldRevertOnProveBlocks(bool _shouldRevert) external onlyOwner { - shouldRevertOnProveBlocks = _shouldRevert; + /// @notice Allows the owner to set whether the contract should revert during prove batches operation + function setShouldRevertOnProveBatches(bool _shouldRevert) external onlyOwner { + shouldRevertOnProveBatches = _shouldRevert; } - /// @notice Allows the owner to set whether the contract should revert during execute blocks operation - function setShouldRevertOnExecuteBlocks(bool _shouldRevert) external onlyOwner { - shouldRevertOnExecuteBlocks = _shouldRevert; + /// @notice Allows the owner to set whether the contract should revert during execute batches operation + function setShouldRevertOnExecuteBatches(bool _shouldRevert) external onlyOwner { + shouldRevertOnExecuteBatches = _shouldRevert; } - function commitBlocks(StoredBlockInfo calldata _lastCommittedBlockData, CommitBlockInfo[] calldata _newBlocksData) + function commitBatches(StoredBatchInfo calldata _lastCommittedBatchData, CommitBatchInfo[] calldata _newBatchesData) external { - require(!shouldRevertOnCommitBlocks, "DummyExecutor: shouldRevertOnCommitBlocks"); + require(!shouldRevertOnCommitBatches, "DummyExecutor: shouldRevertOnCommitBatches"); require( - _lastCommittedBlockData.blockNumber == getTotalBlocksCommitted, - "DummyExecutor: Invalid last committed block number" + _lastCommittedBatchData.batchNumber == getTotalBatchesCommitted, + "DummyExecutor: Invalid last committed batch number" ); - uint256 blocksLength = 
_newBlocksData.length; - for (uint256 i = 0; i < blocksLength; ++i) { - require(getTotalBlocksCommitted + i + 1 == _newBlocksData[i].blockNumber); + uint256 batchesLength = _newBatchesData.length; + for (uint256 i = 0; i < batchesLength; ++i) { + require(getTotalBatchesCommitted + i + 1 == _newBatchesData[i].batchNumber); } - getTotalBlocksCommitted += blocksLength; + getTotalBatchesCommitted += batchesLength; } - function proveBlocks( - StoredBlockInfo calldata _prevBlock, - StoredBlockInfo[] calldata _committedBlocks, + function proveBatches( + StoredBatchInfo calldata _prevBatch, + StoredBatchInfo[] calldata _committedBatches, ProofInput calldata ) external { - require(!shouldRevertOnProveBlocks, "DummyExecutor: shouldRevertOnProveBlocks"); - require(_prevBlock.blockNumber == getTotalBlocksVerified, "DummyExecutor: Invalid previous block number"); + require(!shouldRevertOnProveBatches, "DummyExecutor: shouldRevertOnProveBatches"); + require(_prevBatch.batchNumber == getTotalBatchesVerified, "DummyExecutor: Invalid previous batch number"); - require(_committedBlocks.length == 1, "DummyExecutor: Can prove only one block"); + require(_committedBatches.length == 1, "DummyExecutor: Can prove only one batch"); require( - _committedBlocks[0].blockNumber == _prevBlock.blockNumber + 1, - "DummyExecutor: Can't prove block out of order" + _committedBatches[0].batchNumber == _prevBatch.batchNumber + 1, + "DummyExecutor: Can't prove batch out of order" ); - getTotalBlocksVerified += 1; + getTotalBatchesVerified += 1; require( - getTotalBlocksVerified <= getTotalBlocksCommitted, - "DummyExecutor: prove more blocks than were committed" + getTotalBatchesVerified <= getTotalBatchesCommitted, + "DummyExecutor: prove more batches than were committed" ); } - function executeBlocks(StoredBlockInfo[] calldata _blocksData) external { - require(!shouldRevertOnExecuteBlocks, "DummyExecutor: shouldRevertOnExecuteBlocks"); - uint256 nBlocks = _blocksData.length; - for (uint256 i = 0; i < 
nBlocks; ++i) { - require(_blocksData[i].blockNumber == getTotalBlocksExecuted + i + 1); + function executeBatches(StoredBatchInfo[] calldata _batchesData) external { + require(!shouldRevertOnExecuteBatches, "DummyExecutor: shouldRevertOnExecuteBatches"); + uint256 nBatches = _batchesData.length; + for (uint256 i = 0; i < nBatches; ++i) { + require(_batchesData[i].batchNumber == getTotalBatchesExecuted + i + 1); } - getTotalBlocksExecuted += nBlocks; + getTotalBatchesExecuted += nBatches; require( - getTotalBlocksExecuted <= getTotalBlocksVerified, - "DummyExecutor: Can't execute blocks more than committed and proven currently" + getTotalBatchesExecuted <= getTotalBatchesVerified, + "DummyExecutor: Can't execute batches more than committed and proven currently" ); } - function revertBlocks(uint256 _newLastBlock) external { + function revertBatches(uint256 _newLastBatch) external { require( - getTotalBlocksCommitted > _newLastBlock, - "DummyExecutor: The last committed block is less than new last block" + getTotalBatchesCommitted > _newLastBatch, + "DummyExecutor: The last committed batch is less than new last batch" ); - uint256 newTotalBlocksCommitted = _maxU256(_newLastBlock, getTotalBlocksExecuted); + uint256 newTotalBatchesCommitted = _maxU256(_newLastBatch, getTotalBatchesExecuted); - if (newTotalBlocksCommitted < getTotalBlocksVerified) { - getTotalBlocksVerified = newTotalBlocksCommitted; + if (newTotalBatchesCommitted < getTotalBatchesVerified) { + getTotalBatchesVerified = newTotalBatchesCommitted; } - getTotalBlocksCommitted = newTotalBlocksCommitted; + getTotalBatchesCommitted = newTotalBatchesCommitted; } /// @notice Returns larger of two values diff --git a/ethereum/contracts/upgrades/BaseZkSyncUpgrade.sol b/ethereum/contracts/upgrades/BaseZkSyncUpgrade.sol index ab6f8b7151..94357bbf85 100644 --- a/ethereum/contracts/upgrades/BaseZkSyncUpgrade.sol +++ b/ethereum/contracts/upgrades/BaseZkSyncUpgrade.sol @@ -60,9 +60,9 @@ abstract contract 
BaseZkSyncUpgrade is Base { /// @notice The main function that will be provided by the upgrade proxy function upgrade(ProposedUpgrade calldata _proposedUpgrade) public virtual returns (bytes32) { - // Note that due to commitment delay, the timestamp of the L2 upgrade block may be earlier than the timestamp + // Note that due to commitment delay, the timestamp of the L2 upgrade batch may be earlier than the timestamp // of the L1 block at which the upgrade occured. This means that using timestamp as a signifier of "upgraded" - // on the L2 side would be inaccurate. The effects of this "back-dating" of L2 upgrade blocks will be reduced + // on the L2 side would be inaccurate. The effects of this "back-dating" of L2 upgrade batches will be reduced // as the permitted delay window is reduced in the future. require(block.timestamp >= _proposedUpgrade.upgradeTimestamp, "Upgrade is not ready yet"); } @@ -104,9 +104,9 @@ abstract contract BaseZkSyncUpgrade is Base { /// @notice Change the address of the verifier smart contract /// @param _newVerifier Verifier smart contract address function _setVerifier(IVerifier _newVerifier) private { - // An upgrade to the verifier must be done carefully to ensure there aren't blocks in the committed state - // during the transition. If verifier is upgraded, it will immediately be used to prove all committed blocks. - // Blocks committed expecting the old verifier will fail. Ensure all commited blocks are finalized before the + // An upgrade to the verifier must be done carefully to ensure there aren't batches in the committed state + // during the transition. If verifier is upgraded, it will immediately be used to prove all committed batches. + // Batches committed expecting the old verifier will fail. Ensure all committed batches are finalized before the // verifier is upgraded. 
if (_newVerifier == IVerifier(address(0))) { return; @@ -149,7 +149,7 @@ abstract contract BaseZkSyncUpgrade is Base { _setL2DefaultAccountBytecodeHash(_defaultAccountHash); } - /// @notice Sets the hash of the L2 system contract upgrade transaction for the next block to be committed + /// @notice Sets the hash of the L2 system contract upgrade transaction for the next batch to be committed /// @dev If the transaction is noop (i.e. its type is 0) it does nothing and returns 0. /// @param _l2ProtocolUpgradeTx The L2 system contract upgrade transaction. /// @return System contracts upgrade transaction hash. Zero if no upgrade transaction is set. @@ -216,8 +216,8 @@ abstract contract BaseZkSyncUpgrade is Base { // If the previous upgrade had an L2 system upgrade transaction, we require that it is finalized. require(s.l2SystemContractsUpgradeTxHash == bytes32(0), "Previous upgrade has not been finalized"); require( - s.l2SystemContractsUpgradeBlockNumber == 0, - "The block number of the previous upgrade has not been cleaned" + s.l2SystemContractsUpgradeBatchNumber == 0, + "The batch number of the previous upgrade has not been cleaned" ); s.protocolVersion = _newProtocolVersion; diff --git a/ethereum/contracts/zksync/Config.sol b/ethereum/contracts/zksync/Config.sol index 0ae4291b45..cdf1d552cd 100644 --- a/ethereum/contracts/zksync/Config.sol +++ b/ethereum/contracts/zksync/Config.sol @@ -6,7 +6,7 @@ pragma solidity ^0.8.13; bytes32 constant EMPTY_STRING_KECCAK = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; /// @dev Bytes in raw L2 log -/// @dev Equal to the bytes size of the tuple - (uint8 ShardId, bool isService, uint16 txNumberInBlock, address sender, bytes32 key, bytes32 value) +/// @dev Equal to the bytes size of the tuple - (uint8 ShardId, bool isService, uint16 txNumberInBatch, address sender, bytes32 key, bytes32 value) uint256 constant L2_TO_L1_LOG_SERIALIZE_SIZE = 88; /// @dev The maximum length of the bytes array with L2 -> L1 logs @@ 
-66,7 +66,7 @@ uint256 constant INPUT_MASK = $$(~uint256(0) >> 8); uint256 constant L2_TX_MAX_GAS_LIMIT = $(L2_TX_MAX_GAS_LIMIT); /// @dev The maximum number of the pubdata an L2 operation should be allowed to use. -uint256 constant MAX_PUBDATA_PER_BLOCK = $(MAX_PUBDATA_PER_BLOCK); +uint256 constant MAX_PUBDATA_PER_BATCH = $(MAX_PUBDATA_PER_BATCH); /// @dev The maximum number of the pubdata an priority operation should be allowed to use. /// For now, it is somewhat lower than the maximum number of pubdata allowed for an L2 transaction, @@ -80,17 +80,17 @@ uint256 constant FAIR_L2_GAS_PRICE = $(FAIR_L2_GAS_PRICE); /// value. uint256 constant L1_GAS_PER_PUBDATA_BYTE = $(L1_GAS_PER_PUBDATA_BYTE); -/// @dev The computational overhead of processing an L2 block. -uint256 constant BLOCK_OVERHEAD_L2_GAS = $(BLOCK_OVERHEAD_L2_GAS); +/// @dev The computational overhead of processing an L2 batch. +uint256 constant BATCH_OVERHEAD_L2_GAS = $(BATCH_OVERHEAD_L2_GAS); /// @dev The overhead in L1 gas of interacting with the L1 -uint256 constant BLOCK_OVERHEAD_L1_GAS = $(BLOCK_OVERHEAD_L1_GAS); +uint256 constant BATCH_OVERHEAD_L1_GAS = $(BATCH_OVERHEAD_L1_GAS); /// @dev The equivalent in L1 pubdata of L1 gas used for working with L1 -uint256 constant BLOCK_OVERHEAD_PUBDATA = BLOCK_OVERHEAD_L1_GAS / L1_GAS_PER_PUBDATA_BYTE; +uint256 constant BATCH_OVERHEAD_PUBDATA = BATCH_OVERHEAD_L1_GAS / L1_GAS_PER_PUBDATA_BYTE; -/// @dev The maximum number of transactions in L2 block: -uint256 constant MAX_TRANSACTIONS_IN_BLOCK = $(MAX_TRANSACTIONS_IN_BLOCK); +/// @dev The maximum number of transactions in L2 batch: +uint256 constant MAX_TRANSACTIONS_IN_BATCH = $(MAX_TRANSACTIONS_IN_BATCH); /// @dev The size of the bootloader memory dedicated to the encodings of transactions uint256 constant BOOTLOADER_TX_ENCODING_SPACE = $(BOOTLOADER_TX_ENCODING_SPACE); diff --git a/ethereum/contracts/zksync/DiamondInit.sol b/ethereum/contracts/zksync/DiamondInit.sol index e5cc896409..7f6a50c526 100644 --- 
a/ethereum/contracts/zksync/DiamondInit.sol +++ b/ethereum/contracts/zksync/DiamondInit.sol @@ -18,9 +18,9 @@ contract DiamondInit is Base { /// @notice zkSync contract initialization /// @param _verifier address of Verifier contract /// @param _governor address who can manage the contract - /// @param _genesisBlockHash Block hash of the genesis (initial) block - /// @param _genesisIndexRepeatedStorageChanges The serial number of the shortcut storage key for genesis block - /// @param _genesisBlockCommitment The zk-proof commitment for the genesis block + /// @param _genesisBatchHash Batch hash of the genesis (initial) batch + /// @param _genesisIndexRepeatedStorageChanges The serial number of the shortcut storage key for genesis batch + /// @param _genesisBatchCommitment The zk-proof commitment for the genesis batch /// @param _allowList The address of the allow list smart contract /// @param _verifierParams Verifier config parameters that describes the circuit to be verified /// @param _zkPorterIsAvailable The availability of zk porter shard @@ -31,9 +31,9 @@ contract DiamondInit is Base { function initialize( IVerifier _verifier, address _governor, - bytes32 _genesisBlockHash, + bytes32 _genesisBatchHash, uint64 _genesisIndexRepeatedStorageChanges, - bytes32 _genesisBlockCommitment, + bytes32 _genesisBatchCommitment, IAllowList _allowList, VerifierParams calldata _verifierParams, bool _zkPorterIsAvailable, @@ -47,19 +47,19 @@ contract DiamondInit is Base { s.verifier = _verifier; s.governor = _governor; - // We need to initialize the state hash because it is used in the commitment of the next block - IExecutor.StoredBlockInfo memory storedBlockZero = IExecutor.StoredBlockInfo( + // We need to initialize the state hash because it is used in the commitment of the next batch + IExecutor.StoredBatchInfo memory storedBatchZero = IExecutor.StoredBatchInfo( 0, - _genesisBlockHash, + _genesisBatchHash, _genesisIndexRepeatedStorageChanges, 0, EMPTY_STRING_KECCAK, 
DEFAULT_L2_LOGS_TREE_ROOT_HASH, 0, - _genesisBlockCommitment + _genesisBatchCommitment ); - s.storedBlockHashes[0] = keccak256(abi.encode(storedBlockZero)); + s.storedBatchHashes[0] = keccak256(abi.encode(storedBatchZero)); s.allowList = _allowList; s.verifierParams = _verifierParams; s.zkPorterIsAvailable = _zkPorterIsAvailable; diff --git a/ethereum/contracts/zksync/Storage.sol b/ethereum/contracts/zksync/Storage.sol index 01160d53e9..faf8a34cb8 100644 --- a/ethereum/contracts/zksync/Storage.sol +++ b/ethereum/contracts/zksync/Storage.sol @@ -36,7 +36,7 @@ struct UpgradeStorage { /// @param l2ShardId The shard identifier, 0 - rollup, 1 - porter. All other values are not used but are reserved for the future /// @param isService A boolean flag that is part of the log along with `key`, `value`, and `sender` address. /// This field is required formally but does not have any special meaning. -/// @param txNumberInBlock The L2 transaction number in a block, in which the log was sent +/// @param txNumberInBatch The L2 transaction number in the batch, in which the log was sent /// @param sender The L2 address which sent the log /// @param key The 32 bytes of information that was sent in the log /// @param value The 32 bytes of information that was sent in the log @@ -44,7 +44,7 @@ struct UpgradeStorage { struct L2Log { uint8 l2ShardId; bool isService; - uint16 txNumberInBlock; + uint16 txNumberInBatch; address sender; bytes32 key; bytes32 value; @@ -52,11 +52,11 @@ struct L2Log { /// @dev An arbitrary length message passed from L2 /// @notice Under the hood it is `L2Log` sent from the special system L2 contract -/// @param txNumberInBlock The L2 transaction number in a block, in which the message was sent +/// @param txNumberInBatch The L2 transaction number in the batch, in which the message was sent /// @param sender The address of the L2 account from which the message was passed /// @param data An arbitrary length message struct L2Message { - uint16 txNumberInBlock; + 
uint16 txNumberInBatch; address sender; bytes data; } @@ -82,16 +82,16 @@ struct AppStorage { address pendingGovernor; /// @notice List of permitted validators mapping(address => bool) validators; - /// @dev Verifier contract. Used to verify aggregated proof for blocks + /// @dev Verifier contract. Used to verify aggregated proof for batches IVerifier verifier; - /// @notice Total number of executed blocks i.e. blocks[totalBlocksExecuted] points at the latest executed block (block 0 is genesis) - uint256 totalBlocksExecuted; - /// @notice Total number of proved blocks i.e. blocks[totalBlocksProved] points at the latest proved block - uint256 totalBlocksVerified; - /// @notice Total number of committed blocks i.e. blocks[totalBlocksCommitted] points at the latest committed block - uint256 totalBlocksCommitted; - /// @dev Stored hashed StoredBlock for block number - mapping(uint256 => bytes32) storedBlockHashes; + /// @notice Total number of executed batches i.e. batches[totalBatchesExecuted] points at the latest executed batch (batch 0 is genesis) + uint256 totalBatchesExecuted; + /// @notice Total number of proved batches i.e. batches[totalBatchesProved] points at the latest proved batch + uint256 totalBatchesVerified; + /// @notice Total number of committed batches i.e. batches[totalBatchesCommitted] points at the latest committed batch + uint256 totalBatchesCommitted; + /// @dev Stored hashed StoredBatch for batch number + mapping(uint256 => bytes32) storedBatchHashes; /// @dev Stored root hashes of L2 -> L1 logs mapping(uint256 => bytes32) l2LogsRootHashes; /// @dev Container that stores transactions requested from L1 @@ -111,11 +111,11 @@ struct AppStorage { bool zkPorterIsAvailable; /// @dev The maximum number of the L2 gas that a user can request for L1 -> L2 transactions /// @dev This is the maximum number of L2 gas that is available for the "body" of the transaction, i.e. - /// without overhead for proving the block. 
+ /// without overhead for proving the batch. uint256 priorityTxMaxGasLimit; /// @dev Storage of variables needed for upgrade facet UpgradeStorage upgrades; - /// @dev A mapping L2 block number => message number => flag. + /// @dev A mapping L2 batch number => message number => flag. /// @dev The L2 -> L1 log is sent for every withdrawal, so this mapping is serving as /// a flag to indicate that the message was already processed. /// @dev Used to indicate that eth withdrawal was already processed @@ -131,6 +131,6 @@ struct AppStorage { uint256 protocolVersion; /// @dev Hash of the system contract upgrade transaction. If 0, then no upgrade transaction needs to be done. bytes32 l2SystemContractsUpgradeTxHash; - /// @dev Block number where the upgrade transaction has happened. If 0, then no upgrade transaction has happened yet. - uint256 l2SystemContractsUpgradeBlockNumber; + /// @dev Batch number where the upgrade transaction has happened. If 0, then no upgrade transaction has happened yet. + uint256 l2SystemContractsUpgradeBatchNumber; } diff --git a/ethereum/contracts/zksync/ValidatorTimelock.sol b/ethereum/contracts/zksync/ValidatorTimelock.sol index 7d13f9d83c..d47e96f1c7 100644 --- a/ethereum/contracts/zksync/ValidatorTimelock.sol +++ b/ethereum/contracts/zksync/ValidatorTimelock.sol @@ -7,18 +7,18 @@ import "./interfaces/IExecutor.sol"; /// @author Matter Labs /// @notice Intermediate smart contract between the validator EOA account and the zkSync smart contract. -/// @dev The primary purpose of this contract is to provide a trustless means of delaying block execution without +/// @dev The primary purpose of this contract is to provide a trustless means of delaying batch execution without /// modifying the main zkSync contract. As such, even if this contract is compromised, it will not impact the main contract. /// @dev zkSync actively monitors the chain activity and reacts to any suspicious activity by freezing the chain. 
/// This allows time for investigation and mitigation before resuming normal operations. -/// @dev The contract overloads all of the 4 methods, that are used in state transition. When the block is committed, the -/// timestamp is stored for it. Later, when the owner calls the block execution, the contract checks that block +/// @dev The contract overloads all of the 4 methods, that are used in state transition. When the batch is committed, the +/// timestamp is stored for it. Later, when the owner calls the batch execution, the contract checks that batch /// was committed not earlier than X time ago. contract ValidatorTimelock is IExecutor, Ownable2Step { /// @dev Part of the IBase interface. Not used in this contract. string public constant override getName = "ValidatorTimelock"; - /// @notice The delay between committing and executing blocks is changed. + /// @notice The delay between committing and executing batches is changed. event NewExecutionDelay(uint256 _newExecutionDelay); /// @notice The validator address is changed. @@ -27,13 +27,13 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { /// @dev The main zkSync smart contract. address public immutable zkSyncContract; - /// @dev The mapping of L2 block number => timestamp when it was commited. - mapping(uint256 => uint256) public committedBlockTimestamp; + /// @dev The mapping of L2 batch number => timestamp when it was committed. + mapping(uint256 => uint256) public committedBatchTimestamp; - /// @dev The address that can commit/revert/validate/execute blocks. + /// @dev The address that can commit/revert/validate/execute batches. address public validator; - /// @dev The delay between committing and executing blocks. + /// @dev The delay between committing and executing batches. 
uint256 public executionDelay; constructor( @@ -55,7 +55,7 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { emit NewValidator(oldValidator, _newValidator); } - /// @dev Set the delay between committing and executing blocks. + /// @dev Set the delay between committing and executing batches. function setExecutionDelay(uint256 _executionDelay) external onlyOwner { executionDelay = _executionDelay; emit NewExecutionDelay(_executionDelay); @@ -67,46 +67,49 @@ contract ValidatorTimelock is IExecutor, Ownable2Step { _; } - /// @dev Records the timestamp for all provided committed blocks and make + /// @dev Records the timestamp for all provided committed batches and make /// a call to the zkSync contract with the same calldata. - function commitBlocks(StoredBlockInfo calldata, CommitBlockInfo[] calldata _newBlocksData) external onlyValidator { - for (uint256 i = 0; i < _newBlocksData.length; ++i) { - committedBlockTimestamp[_newBlocksData[i].blockNumber] = block.timestamp; + function commitBatches(StoredBatchInfo calldata, CommitBatchInfo[] calldata _newBatchesData) + external + onlyValidator + { + for (uint256 i = 0; i < _newBatchesData.length; ++i) { + committedBatchTimestamp[_newBatchesData[i].batchNumber] = block.timestamp; } _propagateToZkSync(); } /// @dev Make a call to the zkSync contract with the same calldata. - /// Note: If the block is reverted, it needs to be committed first before the execution. - /// So it's safe to not override the committed blocks. - function revertBlocks(uint256) external onlyValidator { + /// Note: If the batch is reverted, it needs to be committed first before the execution. + /// So it's safe to not override the committed batches. + function revertBatches(uint256) external onlyValidator { _propagateToZkSync(); } /// @dev Make a call to the zkSync contract with the same calldata. 
- /// Note: We don't track the time when blocks are proven, since all information about - /// the block is known on the commit stage and the proved is not finalized (may be reverted). - function proveBlocks( - StoredBlockInfo calldata, - StoredBlockInfo[] calldata, + /// Note: We don't track the time when batches are proven, since all information about + /// the batch is known on the commit stage and the proved is not finalized (may be reverted). + function proveBatches( + StoredBatchInfo calldata, + StoredBatchInfo[] calldata, ProofInput calldata ) external onlyValidator { _propagateToZkSync(); } - /// @dev Check that blocks were committed at least X time ago and + /// @dev Check that batches were committed at least X time ago and /// make a call to the zkSync contract with the same calldata. - function executeBlocks(StoredBlockInfo[] calldata _newBlocksData) external onlyValidator { - for (uint256 i = 0; i < _newBlocksData.length; ++i) { - uint256 commitBlockTimestamp = committedBlockTimestamp[_newBlocksData[i].blockNumber]; + function executeBatches(StoredBatchInfo[] calldata _newBatchesData) external onlyValidator { + for (uint256 i = 0; i < _newBatchesData.length; ++i) { + uint256 commitBatchTimestamp = committedBatchTimestamp[_newBatchesData[i].batchNumber]; - // Note: if the `commitBlockTimestamp` is zero, that means either: - // * The block was committed, but not though this contract. - // * The block wasn't committed at all, so execution will fail in the zkSync contract. - // We allow executing such blocks. + // Note: if the `commitBatchTimestamp` is zero, that means either: + // * The batch was committed, but not through this contract. + // * The batch wasn't committed at all, so execution will fail in the zkSync contract. + // We allow executing such batches. 
- require(block.timestamp > commitBlockTimestamp + executionDelay, "5c"); // The delay is not passed + require(block.timestamp > commitBatchTimestamp + executionDelay, "5c"); // The delay is not passed } _propagateToZkSync(); diff --git a/ethereum/contracts/zksync/facets/Executor.sol b/ethereum/contracts/zksync/facets/Executor.sol index e9f7a8156a..12c4be97f6 100644 --- a/ethereum/contracts/zksync/facets/Executor.sol +++ b/ethereum/contracts/zksync/facets/Executor.sol @@ -20,57 +20,57 @@ contract ExecutorFacet is Base, IExecutor { string public constant override getName = "ExecutorFacet"; - /// @dev Process one block commit using the previous block StoredBlockInfo - /// @dev returns new block StoredBlockInfo + /// @dev Process one batch commit using the previous batch StoredBatchInfo + /// @dev returns new batch StoredBatchInfo /// @notice Does not change storage - function _commitOneBlock( - StoredBlockInfo memory _previousBlock, - CommitBlockInfo calldata _newBlock, + function _commitOneBatch( + StoredBatchInfo memory _previousBatch, + CommitBatchInfo calldata _newBatch, bytes32 _expectedSystemContractUpgradeTxHash - ) internal view returns (StoredBlockInfo memory) { - require(_newBlock.blockNumber == _previousBlock.blockNumber + 1, "f"); // only commit next block + ) internal view returns (StoredBatchInfo memory) { + require(_newBatch.batchNumber == _previousBatch.batchNumber + 1, "f"); // only commit next batch - // Check that block contain all meta information for L2 logs. + // Check that batch contain all meta information for L2 logs. // Get the chained hash of priority transaction hashes. 
( uint256 expectedNumberOfLayer1Txs, bytes32 expectedPriorityOperationsHash, - bytes32 previousBlockHash, + bytes32 previousBatchHash, bytes32 stateDiffHash, bytes32 l2LogsTreeRoot, uint256 packedBatchAndL2BlockTimestamp - ) = _processL2Logs(_newBlock, _expectedSystemContractUpgradeTxHash); + ) = _processL2Logs(_newBatch, _expectedSystemContractUpgradeTxHash); - require(_previousBlock.blockHash == previousBlockHash, "l"); + require(_previousBatch.batchHash == previousBatchHash, "l"); // Check that the priority operation hash in the L2 logs is as expected - require(expectedPriorityOperationsHash == _newBlock.priorityOperationsHash, "t"); + require(expectedPriorityOperationsHash == _newBatch.priorityOperationsHash, "t"); // Check that the number of processed priority operations is as expected - require(expectedNumberOfLayer1Txs == _newBlock.numberOfLayer1Txs, "ta"); + require(expectedNumberOfLayer1Txs == _newBatch.numberOfLayer1Txs, "ta"); - // Check the timestamp of the new block - _verifyBlockTimestamp(packedBatchAndL2BlockTimestamp, _newBlock.timestamp, _previousBlock.timestamp); + // Check the timestamp of the new batch + _verifyBatchTimestamp(packedBatchAndL2BlockTimestamp, _newBatch.timestamp, _previousBatch.timestamp); - // Create block commitment for the proof verification - bytes32 commitment = _createBlockCommitment(_newBlock, stateDiffHash); + // Create batch commitment for the proof verification + bytes32 commitment = _createBatchCommitment(_newBatch, stateDiffHash); return - StoredBlockInfo( - _newBlock.blockNumber, - _newBlock.newStateRoot, - _newBlock.indexRepeatedStorageChanges, - _newBlock.numberOfLayer1Txs, - _newBlock.priorityOperationsHash, + StoredBatchInfo( + _newBatch.batchNumber, + _newBatch.newStateRoot, + _newBatch.indexRepeatedStorageChanges, + _newBatch.numberOfLayer1Txs, + _newBatch.priorityOperationsHash, l2LogsTreeRoot, - _newBlock.timestamp, + _newBatch.timestamp, commitment ); } /// @notice checks that the timestamps of both the new 
batch and the new L2 block are correct. - /// @param _packedBatchAndL2BlockTimestamp - packed batch and L2 block timestamp in a foramt of batchTimestamp * 2**128 + l2BlockTimestamp + /// @param _packedBatchAndL2BlockTimestamp - packed batch and L2 block timestamp in a format of batchTimestamp * 2**128 + l2BlockTimestamp /// @param _expectedBatchTimestamp - expected batch timestamp /// @param _previousBatchTimestamp - the timestamp of the previous batch - function _verifyBlockTimestamp( + function _verifyBatchTimestamp( uint256 _packedBatchAndL2BlockTimestamp, uint256 _expectedBatchTimestamp, uint256 _previousBatchTimestamp @@ -85,7 +85,7 @@ contract ExecutorFacet is Base, IExecutor { uint256 lastL2BlockTimestamp = _packedBatchAndL2BlockTimestamp & PACKED_L2_BLOCK_TIMESTAMP_MASK; - // On L2, all blocks have timestamps within the range of [batchTimestamp, lastL2BlockTimestamp]. + // All L2 blocks have timestamps within the range of [batchTimestamp, lastL2BlockTimestamp]. // So here we need to only double check that: // - The timestamp of the batch is not too small. // - The timestamp of the last L2 block is not too big. @@ -93,30 +93,30 @@ contract ExecutorFacet is Base, IExecutor { require(lastL2BlockTimestamp <= block.timestamp + COMMIT_TIMESTAMP_APPROXIMATION_DELTA, "h2"); // The last L2 block timestamp is too big } - /// @dev Check that L2 logs are proper and block contain all meta information for them + /// @dev Check that L2 logs are proper and batch contain all meta information for them /// @dev The logs processed here should line up such that only one log for each key from the - /// SystemLogKey enum in Constants.sol is processed per new block. 
+ /// @dev Data returned from here will be used to form the batch commitment. + function _processL2Logs(CommitBatchInfo calldata _newBatch, bytes32 _expectedSystemContractUpgradeTxHash) internal pure returns ( uint256 numberOfLayer1Txs, bytes32 chainedPriorityTxsHash, - bytes32 previousBlockHash, + bytes32 previousBatchHash, bytes32 stateDiffHash, bytes32 l2LogsTreeRoot, uint256 packedBatchAndL2BlockTimestamp ) { // Copy L2 to L1 logs into memory. - bytes memory emittedL2Logs = _newBlock.systemLogs[4:]; + bytes memory emittedL2Logs = _newBatch.systemLogs[4:]; // Used as bitmap to set/check log processing happens exactly once. // See SystemLogKey enum in Constants.sol for ordering. uint256 processedLogs; - bytes32 providedL2ToL1PubdataHash = keccak256(_newBlock.totalL2ToL1Pubdata); + bytes32 providedL2ToL1PubdataHash = keccak256(_newBatch.totalL2ToL1Pubdata); // linear traversal of the logs for (uint256 i = 0; i < emittedL2Logs.length; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) { @@ -142,9 +142,9 @@ contract ExecutorFacet is Base, IExecutor { } else if (logKey == uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)) { require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sc"); packedBatchAndL2BlockTimestamp = uint256(logValue); - } else if (logKey == uint256(SystemLogKey.PREV_BLOCK_HASH_KEY)) { + } else if (logKey == uint256(SystemLogKey.PREV_BATCH_HASH_KEY)) { require(logSender == L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, "sv"); - previousBlockHash = logValue; + previousBatchHash = logValue; } else if (logKey == uint256(SystemLogKey.CHAINED_PRIORITY_TXN_HASH_KEY)) { require(logSender == L2_BOOTLOADER_ADDRESS, "bl"); chainedPriorityTxsHash = logValue; @@ -169,88 +169,88 @@ contract ExecutorFacet is Base, IExecutor { } } - /// @notice Commit block + /// @notice Commit batch /// @notice 1. Checks timestamp. /// @notice 2. Process L2 logs. - /// @notice 3. Store block commitments. 
- function commitBlocks(StoredBlockInfo memory _lastCommittedBlockData, CommitBlockInfo[] calldata _newBlocksData) + /// @notice 3. Store batch commitments. + function commitBatches(StoredBatchInfo memory _lastCommittedBatchData, CommitBatchInfo[] calldata _newBatchesData) external override nonReentrant onlyValidator { - // Check that we commit blocks after last committed block - require(s.storedBlockHashes[s.totalBlocksCommitted] == _hashStoredBlockInfo(_lastCommittedBlockData), "i"); // incorrect previous block data - require(_newBlocksData.length > 0, "No blocks to commit"); + // Check that we commit batches after last committed batch + require(s.storedBatchHashes[s.totalBatchesCommitted] == _hashStoredBatchInfo(_lastCommittedBatchData), "i"); // incorrect previous batch data + require(_newBatchesData.length > 0, "No batches to commit"); bytes32 systemContractsUpgradeTxHash = s.l2SystemContractsUpgradeTxHash; // Upgrades are rarely done so we optimize a case with no active system contracts upgrade. - if (systemContractsUpgradeTxHash == bytes32(0) || s.l2SystemContractsUpgradeBlockNumber != 0) { - _commitBlocksWithoutSystemContractsUpgrade(_lastCommittedBlockData, _newBlocksData); + if (systemContractsUpgradeTxHash == bytes32(0) || s.l2SystemContractsUpgradeBatchNumber != 0) { + _commitBatchesWithoutSystemContractsUpgrade(_lastCommittedBatchData, _newBatchesData); } else { - _commitBlocksWithSystemContractsUpgrade( - _lastCommittedBlockData, - _newBlocksData, + _commitBatchesWithSystemContractsUpgrade( + _lastCommittedBatchData, + _newBatchesData, systemContractsUpgradeTxHash ); } - s.totalBlocksCommitted = s.totalBlocksCommitted + _newBlocksData.length; + s.totalBatchesCommitted = s.totalBatchesCommitted + _newBatchesData.length; } - /// @dev Commits new blocks without any system contracts upgrade. - /// @param _lastCommittedBlockData The data of the last committed block. - /// @param _newBlocksData An array of block data that needs to be committed. 
- function _commitBlocksWithoutSystemContractsUpgrade( - StoredBlockInfo memory _lastCommittedBlockData, - CommitBlockInfo[] calldata _newBlocksData + /// @dev Commits new batches without any system contracts upgrade. + /// @param _lastCommittedBatchData The data of the last committed batch. + /// @param _newBatchesData An array of batch data that needs to be committed. + function _commitBatchesWithoutSystemContractsUpgrade( + StoredBatchInfo memory _lastCommittedBatchData, + CommitBatchInfo[] calldata _newBatchesData ) internal { - for (uint256 i = 0; i < _newBlocksData.length; i = i.uncheckedInc()) { - _lastCommittedBlockData = _commitOneBlock(_lastCommittedBlockData, _newBlocksData[i], bytes32(0)); + for (uint256 i = 0; i < _newBatchesData.length; i = i.uncheckedInc()) { + _lastCommittedBatchData = _commitOneBatch(_lastCommittedBatchData, _newBatchesData[i], bytes32(0)); - s.storedBlockHashes[_lastCommittedBlockData.blockNumber] = _hashStoredBlockInfo(_lastCommittedBlockData); + s.storedBatchHashes[_lastCommittedBatchData.batchNumber] = _hashStoredBatchInfo(_lastCommittedBatchData); emit BlockCommit( - _lastCommittedBlockData.blockNumber, - _lastCommittedBlockData.blockHash, - _lastCommittedBlockData.commitment + _lastCommittedBatchData.batchNumber, + _lastCommittedBatchData.batchHash, + _lastCommittedBatchData.commitment ); } } - /// @dev Commits new blocks with a system contracts upgrade transaction. - /// @param _lastCommittedBlockData The data of the last committed block. - /// @param _newBlocksData An array of block data that needs to be committed. + /// @dev Commits new batches with a system contracts upgrade transaction. + /// @param _lastCommittedBatchData The data of the last committed batch. + /// @param _newBatchesData An array of batch data that needs to be committed. /// @param _systemContractUpgradeTxHash The transaction hash of the system contract upgrade. 
- function _commitBlocksWithSystemContractsUpgrade( - StoredBlockInfo memory _lastCommittedBlockData, - CommitBlockInfo[] calldata _newBlocksData, + function _commitBatchesWithSystemContractsUpgrade( + StoredBatchInfo memory _lastCommittedBatchData, + CommitBatchInfo[] calldata _newBatchesData, bytes32 _systemContractUpgradeTxHash ) internal { // The system contract upgrade is designed to be executed atomically with the new bootloader, a default account, // ZKP verifier, and other system parameters. Hence, we ensure that the upgrade transaction is - // carried out within the first block committed after the upgrade. + // carried out within the first batch committed after the upgrade. - // While the logic of the contract ensures that the s.l2SystemContractsUpgradeBlockNumber is 0 when this function is called, + // While the logic of the contract ensures that the s.l2SystemContractsUpgradeBatchNumber is 0 when this function is called, // this check is added just in case. Since it is a hot read, it does not encure noticable gas cost. - require(s.l2SystemContractsUpgradeBlockNumber == 0, "ik"); + require(s.l2SystemContractsUpgradeBatchNumber == 0, "ik"); - // Save the block number where the upgrade transaction was executed. - s.l2SystemContractsUpgradeBlockNumber = _newBlocksData[0].blockNumber; + // Save the batch number where the upgrade transaction was executed. + s.l2SystemContractsUpgradeBatchNumber = _newBatchesData[0].batchNumber; - for (uint256 i = 0; i < _newBlocksData.length; i = i.uncheckedInc()) { - // The upgrade transaction must only be included in the first block. + for (uint256 i = 0; i < _newBatchesData.length; i = i.uncheckedInc()) { + // The upgrade transaction must only be included in the first batch. bytes32 expectedUpgradeTxHash = i == 0 ? 
_systemContractUpgradeTxHash : bytes32(0); - _lastCommittedBlockData = _commitOneBlock( - _lastCommittedBlockData, - _newBlocksData[i], + _lastCommittedBatchData = _commitOneBatch( + _lastCommittedBatchData, + _newBatchesData[i], expectedUpgradeTxHash ); - s.storedBlockHashes[_lastCommittedBlockData.blockNumber] = _hashStoredBlockInfo(_lastCommittedBlockData); + s.storedBatchHashes[_lastCommittedBatchData.batchNumber] = _hashStoredBatchInfo(_lastCommittedBatchData); emit BlockCommit( - _lastCommittedBlockData.blockNumber, - _lastCommittedBlockData.blockHash, - _lastCommittedBlockData.commitment + _lastCommittedBatchData.batchNumber, + _lastCommittedBatchData.batchHash, + _lastCommittedBatchData.commitment ); } } @@ -265,82 +265,85 @@ contract ExecutorFacet is Base, IExecutor { } } - /// @dev Executes one block + /// @dev Executes one batch /// @dev 1. Processes all pending operations (Complete priority requests) - /// @dev 2. Finalizes block on Ethereum - /// @dev _executedBlockIdx is an index in the array of the blocks that we want to execute together - function _executeOneBlock(StoredBlockInfo memory _storedBlock, uint256 _executedBlockIdx) internal { - uint256 currentBlockNumber = _storedBlock.blockNumber; - require(currentBlockNumber == s.totalBlocksExecuted + _executedBlockIdx + 1, "k"); // Execute blocks in order + /// @dev 2. 
Finalizes batch on Ethereum + /// @dev _executedBatchIdx is an index in the array of the batches that we want to execute together + function _executeOneBatch(StoredBatchInfo memory _storedBatch, uint256 _executedBatchIdx) internal { + uint256 currentBatchNumber = _storedBatch.batchNumber; + require(currentBatchNumber == s.totalBatchesExecuted + _executedBatchIdx + 1, "k"); // Execute batches in order require( - _hashStoredBlockInfo(_storedBlock) == s.storedBlockHashes[currentBlockNumber], - "exe10" // executing block should be committed + _hashStoredBatchInfo(_storedBatch) == s.storedBatchHashes[currentBatchNumber], + "exe10" // executing batch should be committed ); - bytes32 priorityOperationsHash = _collectOperationsFromPriorityQueue(_storedBlock.numberOfLayer1Txs); - require(priorityOperationsHash == _storedBlock.priorityOperationsHash, "x"); // priority operations hash does not match to expected + bytes32 priorityOperationsHash = _collectOperationsFromPriorityQueue(_storedBatch.numberOfLayer1Txs); + require(priorityOperationsHash == _storedBatch.priorityOperationsHash, "x"); // priority operations hash does not match to expected // Save root hash of L2 -> L1 logs tree - s.l2LogsRootHashes[currentBlockNumber] = _storedBlock.l2LogsTreeRoot; + s.l2LogsRootHashes[currentBatchNumber] = _storedBatch.l2LogsTreeRoot; } - /// @notice Execute blocks, complete priority operations and process withdrawals. + /// @notice Execute batches, complete priority operations and process withdrawals. /// @notice 1. Processes all pending operations (Complete priority requests) - /// @notice 2. Finalizes block on Ethereum - function executeBlocks(StoredBlockInfo[] calldata _blocksData) external nonReentrant onlyValidator { - uint256 nBlocks = _blocksData.length; - for (uint256 i = 0; i < nBlocks; i = i.uncheckedInc()) { - _executeOneBlock(_blocksData[i], i); - emit BlockExecution(_blocksData[i].blockNumber, _blocksData[i].blockHash, _blocksData[i].commitment); + /// @notice 2. 
Finalizes batch on Ethereum + function executeBatches(StoredBatchInfo[] calldata _batchesData) external nonReentrant onlyValidator { + uint256 nBatches = _batchesData.length; + for (uint256 i = 0; i < nBatches; i = i.uncheckedInc()) { + _executeOneBatch(_batchesData[i], i); + emit BlockExecution(_batchesData[i].batchNumber, _batchesData[i].batchHash, _batchesData[i].commitment); } - uint256 newTotalBlocksExecuted = s.totalBlocksExecuted + nBlocks; - s.totalBlocksExecuted = newTotalBlocksExecuted; - require(newTotalBlocksExecuted <= s.totalBlocksVerified, "n"); // Can't execute blocks more than committed and proven currently. + uint256 newTotalBatchesExecuted = s.totalBatchesExecuted + nBatches; + s.totalBatchesExecuted = newTotalBatchesExecuted; + require(newTotalBatchesExecuted <= s.totalBatchesVerified, "n"); // Can't execute batches more than committed and proven currently. - uint256 blockWhenUpgradeHappened = s.l2SystemContractsUpgradeBlockNumber; - if (blockWhenUpgradeHappened != 0 && blockWhenUpgradeHappened <= newTotalBlocksExecuted) { + uint256 batchWhenUpgradeHappened = s.l2SystemContractsUpgradeBatchNumber; + if (batchWhenUpgradeHappened != 0 && batchWhenUpgradeHappened <= newTotalBatchesExecuted) { delete s.l2SystemContractsUpgradeTxHash; - delete s.l2SystemContractsUpgradeBlockNumber; + delete s.l2SystemContractsUpgradeBatchNumber; } } - /// @notice Blocks commitment verification. - /// @notice Only verifies block commitments without any other processing - function proveBlocks( - StoredBlockInfo calldata _prevBlock, - StoredBlockInfo[] calldata _committedBlocks, + /// @notice Batches commitment verification. 
+ /// @notice Only verifies batch commitments without any other processing + function proveBatches( + StoredBatchInfo calldata _prevBatch, + StoredBatchInfo[] calldata _committedBatches, ProofInput calldata _proof ) external nonReentrant onlyValidator { // Save the variables into the stack to save gas on reading them later - uint256 currentTotalBlocksVerified = s.totalBlocksVerified; - uint256 committedBlocksLength = _committedBlocks.length; + uint256 currentTotalBatchesVerified = s.totalBatchesVerified; + uint256 committedBatchesLength = _committedBatches.length; // Save the variable from the storage to memory to save gas VerifierParams memory verifierParams = s.verifierParams; // Initialize the array, that will be used as public input to the ZKP - uint256[] memory proofPublicInput = new uint256[](committedBlocksLength); + uint256[] memory proofPublicInput = new uint256[](committedBatchesLength); - // Check that the block passed by the validator is indeed the first unverified block - require(_hashStoredBlockInfo(_prevBlock) == s.storedBlockHashes[currentTotalBlocksVerified], "t1"); + // Check that the batch passed by the validator is indeed the first unverified batch + require(_hashStoredBatchInfo(_prevBatch) == s.storedBatchHashes[currentTotalBatchesVerified], "t1"); - bytes32 prevBlockCommitment = _prevBlock.commitment; - for (uint256 i = 0; i < committedBlocksLength; i = i.uncheckedInc()) { - currentTotalBlocksVerified = currentTotalBlocksVerified.uncheckedInc(); - require(_hashStoredBlockInfo(_committedBlocks[i]) == s.storedBlockHashes[currentTotalBlocksVerified], "o1"); + bytes32 prevBatchCommitment = _prevBatch.commitment; + for (uint256 i = 0; i < committedBatchesLength; i = i.uncheckedInc()) { + currentTotalBatchesVerified = currentTotalBatchesVerified.uncheckedInc(); + require( + _hashStoredBatchInfo(_committedBatches[i]) == s.storedBatchHashes[currentTotalBatchesVerified], + "o1" + ); - bytes32 currentBlockCommitment = _committedBlocks[i].commitment; - 
proofPublicInput[i] = _getBlockProofPublicInput( - prevBlockCommitment, - currentBlockCommitment, + bytes32 currentBatchCommitment = _committedBatches[i].commitment; + proofPublicInput[i] = _getBatchProofPublicInput( + prevBatchCommitment, + currentBatchCommitment, _proof, verifierParams ); - prevBlockCommitment = currentBlockCommitment; + prevBatchCommitment = currentBatchCommitment; } - require(currentTotalBlocksVerified <= s.totalBlocksCommitted, "q"); + require(currentTotalBatchesVerified <= s.totalBatchesCommitted, "q"); // #if DUMMY_VERIFIER @@ -365,14 +368,14 @@ contract ExecutorFacet is Base, IExecutor { require(successVerifyProof, "p"); // Proof verification fail // #endif - emit BlocksVerification(s.totalBlocksVerified, currentTotalBlocksVerified); - s.totalBlocksVerified = currentTotalBlocksVerified; + emit BlocksVerification(s.totalBatchesVerified, currentTotalBatchesVerified); + s.totalBatchesVerified = currentTotalBatchesVerified; } /// @dev Gets zk proof public input - function _getBlockProofPublicInput( - bytes32 _prevBlockCommitment, - bytes32 _currentBlockCommitment, + function _getBatchProofPublicInput( + bytes32 _prevBatchCommitment, + bytes32 _currentBatchCommitment, ProofInput calldata _proof, VerifierParams memory _verifierParams ) internal pure returns (uint256) { @@ -380,8 +383,8 @@ contract ExecutorFacet is Base, IExecutor { uint256( keccak256( abi.encodePacked( - _prevBlockCommitment, - _currentBlockCommitment, + _prevBatchCommitment, + _currentBatchCommitment, _verifierParams.recursionNodeLevelVkHash, _verifierParams.recursionLeafLevelVkHash ) @@ -389,26 +392,26 @@ contract ExecutorFacet is Base, IExecutor { ) & INPUT_MASK; } - /// @notice Reverts unexecuted blocks - /// @param _newLastBlock block number after which blocks should be reverted - /// NOTE: Doesn't delete the stored data about blocks, but only decreases - /// counters that are responsible for the number of blocks - function revertBlocks(uint256 _newLastBlock) external 
nonReentrant onlyValidator { - require(s.totalBlocksCommitted > _newLastBlock, "v1"); // The last committed block is less than new last block - uint256 newTotalBlocksCommitted = _maxU256(_newLastBlock, s.totalBlocksExecuted); + /// @notice Reverts unexecuted batches + /// @param _newLastBatch batch number after which batches should be reverted + /// NOTE: Doesn't delete the stored data about batches, but only decreases + /// counters that are responsible for the number of batches + function revertBatches(uint256 _newLastBatch) external nonReentrant onlyValidator { + require(s.totalBatchesCommitted > _newLastBatch, "v1"); // The last committed batch is less than new last batch + uint256 newTotalBatchesCommitted = _maxU256(_newLastBatch, s.totalBatchesExecuted); - if (newTotalBlocksCommitted < s.totalBlocksVerified) { - s.totalBlocksVerified = newTotalBlocksCommitted; + if (newTotalBatchesCommitted < s.totalBatchesVerified) { + s.totalBatchesVerified = newTotalBatchesCommitted; } - s.totalBlocksCommitted = newTotalBlocksCommitted; + s.totalBatchesCommitted = newTotalBatchesCommitted; - // Reset the block number of the executed system contracts upgrade transaction if the block - // where the system contracts upgrade was committed is among the reverted blocks. - if (s.l2SystemContractsUpgradeBlockNumber > newTotalBlocksCommitted) { - delete s.l2SystemContractsUpgradeBlockNumber; + // Reset the batch number of the executed system contracts upgrade transaction if the batch + // where the system contracts upgrade was committed is among the reverted batches. 
+ if (s.l2SystemContractsUpgradeBatchNumber > newTotalBatchesCommitted) { + delete s.l2SystemContractsUpgradeBatchNumber; } - emit BlocksRevert(s.totalBlocksCommitted, s.totalBlocksVerified, s.totalBlocksExecuted); + emit BlocksRevert(s.totalBatchesCommitted, s.totalBatchesVerified, s.totalBatchesExecuted); } /// @notice Returns larger of two values @@ -416,54 +419,54 @@ contract ExecutorFacet is Base, IExecutor { return a < b ? b : a; } - /// @dev Creates block commitment from its data - function _createBlockCommitment(CommitBlockInfo calldata _newBlockData, bytes32 _stateDiffHash) + /// @dev Creates batch commitment from its data + function _createBatchCommitment(CommitBatchInfo calldata _newBatchData, bytes32 _stateDiffHash) internal view returns (bytes32) { - bytes32 passThroughDataHash = keccak256(_blockPassThroughData(_newBlockData)); - bytes32 metadataHash = keccak256(_blockMetaParameters()); - bytes32 auxiliaryOutputHash = keccak256(_blockAuxiliaryOutput(_newBlockData, _stateDiffHash)); + bytes32 passThroughDataHash = keccak256(_batchPassThroughData(_newBatchData)); + bytes32 metadataHash = keccak256(_batchMetaParameters()); + bytes32 auxiliaryOutputHash = keccak256(_batchAuxiliaryOutput(_newBatchData, _stateDiffHash)); return keccak256(abi.encode(passThroughDataHash, metadataHash, auxiliaryOutputHash)); } - function _blockPassThroughData(CommitBlockInfo calldata _block) internal pure returns (bytes memory) { + function _batchPassThroughData(CommitBatchInfo calldata _batch) internal pure returns (bytes memory) { return abi.encodePacked( - _block.indexRepeatedStorageChanges, - _block.newStateRoot, + _batch.indexRepeatedStorageChanges, + _batch.newStateRoot, uint64(0), // index repeated storage changes in zkPorter - bytes32(0) // zkPorter block hash + bytes32(0) // zkPorter batch hash ); } - function _blockMetaParameters() internal view returns (bytes memory) { + function _batchMetaParameters() internal view returns (bytes memory) { return 
abi.encodePacked(s.zkPorterIsAvailable, s.l2BootloaderBytecodeHash, s.l2DefaultAccountBytecodeHash); } - function _blockAuxiliaryOutput(CommitBlockInfo calldata _block, bytes32 _stateDiffHash) + function _batchAuxiliaryOutput(CommitBatchInfo calldata _batch, bytes32 _stateDiffHash) internal pure returns (bytes memory) { - require(_block.systemLogs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu"); + require(_batch.systemLogs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu"); - bytes32 l2ToL1LogsHash = keccak256(_block.systemLogs); + bytes32 l2ToL1LogsHash = keccak256(_batch.systemLogs); return abi.encode( l2ToL1LogsHash, _stateDiffHash, - _block.bootloaderHeapInitialContentsHash, - _block.eventsQueueStateHash + _batch.bootloaderHeapInitialContentsHash, + _batch.eventsQueueStateHash ); } - /// @notice Returns the keccak hash of the ABI-encoded StoredBlockInfo - function _hashStoredBlockInfo(StoredBlockInfo memory _storedBlockInfo) internal pure returns (bytes32) { - return keccak256(abi.encode(_storedBlockInfo)); + /// @notice Returns the keccak hash of the ABI-encoded StoredBatchInfo + function _hashStoredBatchInfo(StoredBatchInfo memory _storedBatchInfo) internal pure returns (bytes32) { + return keccak256(abi.encode(_storedBatchInfo)); } /// @notice Returns if the bit at index {_index} is 1 diff --git a/ethereum/contracts/zksync/facets/Getters.sol b/ethereum/contracts/zksync/facets/Getters.sol index 07a6cc57a6..ef5bf2f3d4 100644 --- a/ethereum/contracts/zksync/facets/Getters.sol +++ b/ethereum/contracts/zksync/facets/Getters.sol @@ -7,10 +7,11 @@ import "../libraries/Diamond.sol"; import "../libraries/PriorityQueue.sol"; import "../../common/libraries/UncheckedMath.sol"; import "../interfaces/IGetters.sol"; +import "../interfaces/ILegacyGetters.sol"; -/// @title Getters Contract implements functions for getting contract state from outside the blockchain. +/// @title Getters Contract implements functions for getting contract state from outside the blockchain. 
/// @author Matter Labs -contract GettersFacet is Base, IGetters { +contract GettersFacet is Base, IGetters, ILegacyGetters { using UncheckedMath for uint256; using PriorityQueue for PriorityQueue.Queue; @@ -35,19 +36,19 @@ contract GettersFacet is Base, IGetters { return s.pendingGovernor; } - /// @return The total number of blocks that were committed - function getTotalBlocksCommitted() external view returns (uint256) { - return s.totalBlocksCommitted; + /// @return The total number of batches that were committed + function getTotalBatchesCommitted() external view returns (uint256) { + return s.totalBatchesCommitted; } - /// @return The total number of blocks that were committed & verified - function getTotalBlocksVerified() external view returns (uint256) { - return s.totalBlocksVerified; + /// @return The total number of batches that were committed & verified + function getTotalBatchesVerified() external view returns (uint256) { + return s.totalBatchesVerified; } - /// @return The total number of blocks that were committed & verified & executed - function getTotalBlocksExecuted() external view returns (uint256) { - return s.totalBlocksExecuted; + /// @return The total number of batches that were committed & verified & executed + function getTotalBatchesExecuted() external view returns (uint256) { + return s.totalBatchesExecuted; } /// @return The total number of priority operations that were added to the priority queue, including all processed ones @@ -77,16 +78,16 @@ contract GettersFacet is Base, IGetters { return s.validators[_address]; } - /// @return Merkle root of the tree with L2 logs for the selected block - function l2LogsRootHash(uint256 _blockNumber) external view returns (bytes32) { - return s.l2LogsRootHashes[_blockNumber]; + /// @return Merkle root of the tree with L2 logs for the selected batch + function l2LogsRootHash(uint256 _batchNumber) external view returns (bytes32) { + return s.l2LogsRootHashes[_batchNumber]; } - /// @notice For 
unfinalized (non executed) blocks may change - /// @dev returns zero for non-committed blocks - /// @return The hash of committed L2 block. - function storedBlockHash(uint256 _blockNumber) external view returns (bytes32) { - return s.storedBlockHashes[_blockNumber]; + /// @notice For unfinalized (non executed) batches may change + /// @dev returns zero for non-committed batches + /// @return The hash of committed L2 batch. + function storedBatchHash(uint256 _batchNumber) external view returns (bytes32) { + return s.storedBatchHashes[_batchNumber]; } /// @return Bytecode hash of bootloader program. @@ -139,13 +140,13 @@ contract GettersFacet is Base, IGetters { return s.l2SystemContractsUpgradeTxHash; } - /// @return The L2 block number in which the upgrade transaction was processed. + /// @return The L2 batch number in which the upgrade transaction was processed. /// @dev It is equal to 0 in the following two cases: /// - No upgrade transaction has ever been processed. - /// - The upgrade transaction has been processed and the block with such transaction has been + /// - The upgrade transaction has been processed and the batch with such transaction has been /// executed (i.e. finalized). - function getL2SystemContractsUpgradeBlockNumber() external view returns (uint256) { - return s.l2SystemContractsUpgradeBlockNumber; + function getL2SystemContractsUpgradeBatchNumber() external view returns (uint256) { + return s.l2SystemContractsUpgradeBatchNumber; } /// @return The number of received upgrade approvals from the security council @@ -190,10 +191,10 @@ contract GettersFacet is Base, IGetters { } /// @return Whether a withdrawal has been finalized. - /// @param _l2BlockNumber The L2 block number within which the withdrawal happened. + /// @param _l2BatchNumber The L2 batch number within which the withdrawal happened. /// @param _l2MessageIndex The index of the L2->L1 message denoting the withdrawal. 
- function isEthWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool) { - return s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex]; + function isEthWithdrawalFinalized(uint256 _l2BatchNumber, uint256 _l2MessageIndex) external view returns (bool) { + return s.isEthWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex]; } /*////////////////////////////////////////////////////////////// @@ -232,4 +233,44 @@ contract GettersFacet is Base, IGetters { Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage(); return ds.selectorToFacet[_selector].facetAddress; } + + /*////////////////////////////////////////////////////////////// + DEPRECATED METHODS + //////////////////////////////////////////////////////////////*/ + + /// @return The total number of batches that were committed + /// @dev It is a *deprecated* method, please use `getTotalBatchesCommitted` instead + function getTotalBlocksCommitted() external view returns (uint256) { + return s.totalBatchesCommitted; + } + + /// @return The total number of batches that were committed & verified + /// @dev It is a *deprecated* method, please use `getTotalBatchesVerified` instead. + function getTotalBlocksVerified() external view returns (uint256) { + return s.totalBatchesVerified; + } + + /// @return The total number of batches that were committed & verified & executed + /// @dev It is a *deprecated* method, please use `getTotalBatchesExecuted` instead. + function getTotalBlocksExecuted() external view returns (uint256) { + return s.totalBatchesExecuted; + } + + /// @notice For unfinalized (non executed) batches may change + /// @dev It is a *deprecated* method, please use `storedBatchHash` instead. + /// @dev returns zero for non-committed batches + /// @return The hash of committed L2 batch. 
+ function storedBlockHash(uint256 _batchNumber) external view returns (bytes32) { + return s.storedBatchHashes[_batchNumber]; + } + + /// @return The L2 batch number in which the upgrade transaction was processed. + /// @dev It is a *deprecated* method, please use `getL2SystemContractsUpgradeBatchNumber` instead. + /// @dev It is equal to 0 in the following two cases: + /// - No upgrade transaction has ever been processed. + /// - The upgrade transaction has been processed and the batch with such transaction has been + /// executed (i.e. finalized). + function getL2SystemContractsUpgradeBlockNumber() external view returns (uint256) { + return s.l2SystemContractsUpgradeBatchNumber; + } } diff --git a/ethereum/contracts/zksync/facets/Mailbox.sol b/ethereum/contracts/zksync/facets/Mailbox.sol index 2718fc0f74..e794ebda38 100644 --- a/ethereum/contracts/zksync/facets/Mailbox.sol +++ b/ethereum/contracts/zksync/facets/Mailbox.sol @@ -25,55 +25,55 @@ contract MailboxFacet is Base, IMailbox { string public constant override getName = "MailboxFacet"; - /// @notice Prove that a specific arbitrary-length message was sent in a specific L2 block number - /// @param _blockNumber The executed L2 block number in which the message appeared + /// @notice Prove that a specific arbitrary-length message was sent in a specific L2 batch number + /// @param _batchNumber The executed L2 batch number in which the message appeared /// @param _index The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _message Information about the sent message: sender address, the message itself, tx index in the L2 block where the message was sent + /// @param _message Information about the sent message: sender address, the message itself, tx index in the L2 batch where the message was sent /// @param _proof Merkle proof for inclusion of L2 log that was sent with the message /// @return Whether the proof is valid function proveL2MessageInclusion( - uint256 
_blockNumber, + uint256 _batchNumber, uint256 _index, L2Message memory _message, bytes32[] calldata _proof ) public view returns (bool) { - return _proveL2LogInclusion(_blockNumber, _index, _L2MessageToLog(_message), _proof); + return _proveL2LogInclusion(_batchNumber, _index, _L2MessageToLog(_message), _proof); } - /// @notice Prove that a specific L2 log was sent in a specific L2 block - /// @param _blockNumber The executed L2 block number in which the log appeared + /// @notice Prove that a specific L2 log was sent in a specific L2 batch + /// @param _batchNumber The executed L2 batch number in which the log appeared /// @param _index The position of the l2log in the L2 logs Merkle tree /// @param _log Information about the sent log /// @param _proof Merkle proof for inclusion of the L2 log - /// @return Whether the proof is correct and L2 log is included in block + /// @return Whether the proof is correct and L2 log is included in batch function proveL2LogInclusion( - uint256 _blockNumber, + uint256 _batchNumber, uint256 _index, L2Log memory _log, bytes32[] calldata _proof ) external view returns (bool) { - return _proveL2LogInclusion(_blockNumber, _index, _log, _proof); + return _proveL2LogInclusion(_batchNumber, _index, _log, _proof); } /// @notice Prove that the L1 -> L2 transaction was processed with the specified status. 
/// @param _l2TxHash The L2 canonical transaction hash - /// @param _l2BlockNumber The L2 block number where the transaction was processed + /// @param _l2BatchNumber The L2 batch number where the transaction was processed /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent + /// @param _l2TxNumberInBatch The L2 transaction number in the batch, in which the log was sent /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction /// @param _status The execution status of the L1 -> L2 transaction (true - success & 0 - fail) /// @return Whether the proof is correct and the transaction was actually executed with provided status /// NOTE: It may return `false` for incorrect proof, but it doesn't mean that the L1 -> L2 transaction has an opposite status! function proveL1ToL2TransactionStatus( bytes32 _l2TxHash, - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes32[] calldata _merkleProof, TxStatus _status ) public view override returns (bool) { // Bootloader sends an L2 -> L1 log only after processing the L1 -> L2 transaction. - // Thus, we can verify that the L1 -> L2 transaction was included in the L2 block with specified status. + // Thus, we can verify that the L1 -> L2 transaction was included in the L2 batch with specified status. 
// // The semantics of such L2 -> L1 log is always: // - sender = L2_BOOTLOADER_ADDRESS @@ -81,16 +81,16 @@ contract MailboxFacet is Base, IMailbox { // - value = status of the processing transaction (1 - success & 0 - fail) // - isService = true (just a conventional value) // - l2ShardId = 0 (means that L1 -> L2 transaction was processed in a rollup shard, other shards are not available yet anyway) - // - txNumberInBlock = number of transaction in the block + // - txNumberInBatch = number of transaction in the batch L2Log memory l2Log = L2Log({ l2ShardId: 0, isService: true, - txNumberInBlock: _l2TxNumberInBlock, + txNumberInBatch: _l2TxNumberInBatch, sender: L2_BOOTLOADER_ADDRESS, key: _l2TxHash, value: bytes32(uint256(_status)) }); - return _proveL2LogInclusion(_l2BlockNumber, _l2MessageIndex, l2Log, _merkleProof); + return _proveL2LogInclusion(_l2BatchNumber, _l2MessageIndex, l2Log, _merkleProof); } /// @notice Transfer ether from the contract to the receiver @@ -104,17 +104,17 @@ contract MailboxFacet is Base, IMailbox { require(callSuccess, "pz"); } - /// @dev Prove that a specific L2 log was sent in a specific L2 block number + /// @dev Prove that a specific L2 log was sent in a specific L2 batch number function _proveL2LogInclusion( - uint256 _blockNumber, + uint256 _batchNumber, uint256 _index, L2Log memory _log, bytes32[] calldata _proof ) internal view returns (bool) { - require(_blockNumber <= s.totalBlocksExecuted, "xx"); + require(_batchNumber <= s.totalBatchesExecuted, "xx"); bytes32 hashedLog = keccak256( - abi.encodePacked(_log.l2ShardId, _log.isService, _log.txNumberInBlock, _log.sender, _log.key, _log.value) + abi.encodePacked(_log.l2ShardId, _log.isService, _log.txNumberInBatch, _log.sender, _log.key, _log.value) ); // Check that hashed log is not the default one, // otherwise it means that the value is out of range of sent L2 -> L1 logs @@ -125,7 +125,7 @@ contract MailboxFacet is Base, IMailbox { // equal to the length of other nodes preimages 
(which are `2 * 32`) bytes32 calculatedRootHash = Merkle.calculateRoot(_proof, _index, hashedLog); - bytes32 actualRootHash = s.l2LogsRootHashes[_blockNumber]; + bytes32 actualRootHash = s.l2LogsRootHashes[_batchNumber]; return actualRootHash == calculatedRootHash; } @@ -136,7 +136,7 @@ contract MailboxFacet is Base, IMailbox { L2Log({ l2ShardId: 0, isService: true, - txNumberInBlock: _message.txNumberInBlock, + txNumberInBatch: _message.txNumberInBatch, sender: L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, key: bytes32(uint256(uint160(_message.sender))), value: keccak256(_message.data) @@ -169,32 +169,32 @@ contract MailboxFacet is Base, IMailbox { } /// @notice Finalize the withdrawal and release funds - /// @param _l2BlockNumber The L2 block number where the withdrawal was processed + /// @param _l2BatchNumber The L2 batch number where the withdrawal was processed /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message - /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent + /// @param _l2TxNumberInBatch The L2 transaction number in a batch, in which the log was sent /// @param _message The L2 withdraw data, stored in an L2 -> L1 message /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization function finalizeEthWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external override nonReentrant senderCanCallFunction(s.allowList) { - require(!s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex], "jj"); + require(!s.isEthWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex], "jj"); L2Message memory l2ToL1Message = L2Message({ - txNumberInBlock: _l2TxNumberInBlock, + txNumberInBatch: _l2TxNumberInBatch, sender: L2_ETH_TOKEN_SYSTEM_CONTRACT_ADDR, data: _message }); 
(address _l1WithdrawReceiver, uint256 _amount) = _parseL2WithdrawalMessage(_message); - bool proofValid = proveL2MessageInclusion(_l2BlockNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); + bool proofValid = proveL2MessageInclusion(_l2BatchNumber, _l2MessageIndex, l2ToL1Message, _merkleProof); require(proofValid, "pi"); // Failed to verify that withdrawal was actually initialized on L2 - s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex] = true; + s.isEthWithdrawalFinalized[_l2BatchNumber][_l2MessageIndex] = true; _withdrawFunds(_l1WithdrawReceiver, _amount); emit EthWithdrawalFinalized(_l1WithdrawReceiver, _amount); diff --git a/ethereum/contracts/zksync/interfaces/IExecutor.sol b/ethereum/contracts/zksync/interfaces/IExecutor.sol index 19391a51a1..c4c36bd95d 100644 --- a/ethereum/contracts/zksync/interfaces/IExecutor.sol +++ b/ethereum/contracts/zksync/interfaces/IExecutor.sol @@ -10,7 +10,7 @@ enum SystemLogKey { TOTAL_L2_TO_L1_PUBDATA_KEY, STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - PREV_BLOCK_HASH_KEY, + PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH @@ -26,18 +26,18 @@ uint256 constant L2_LOG_KEY_OFFSET = 24; uint256 constant L2_LOG_VALUE_OFFSET = 56; interface IExecutor is IBase { - /// @notice Rollup block stored data - /// @param blockNumber Rollup block number - /// @param blockHash Hash of L2 block + /// @notice Rollup batch stored data + /// @param batchNumber Rollup batch number + /// @param batchHash Hash of L2 batch /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more /// @param numberOfLayer1Txs Number of priority operations to be processed - /// @param priorityOperationsHash Hash of all priority operations from this block - /// @param l2LogsTreeRoot Root hash of tree that contains L2 -> L1 messages from this block - /// @param timestamp 
Rollup block timestamp, have the same format as Ethereum block constant + /// @param priorityOperationsHash Hash of all priority operations from this batch + /// @param l2LogsTreeRoot Root hash of tree that contains L2 -> L1 messages from this batch + /// @param timestamp Rollup batch timestamp, have the same format as Ethereum block constant /// @param commitment Verified input for the zkSync circuit - struct StoredBlockInfo { - uint64 blockNumber; - bytes32 blockHash; + struct StoredBatchInfo { + uint64 batchNumber; + bytes32 batchHash; uint64 indexRepeatedStorageChanges; uint256 numberOfLayer1Txs; bytes32 priorityOperationsHash; @@ -46,19 +46,19 @@ interface IExecutor is IBase { bytes32 commitment; } - /// @notice Data needed to commit new block - /// @param blockNumber Number of the committed block - /// @param timestamp Unix timestamp denoting the start of the block execution + /// @notice Data needed to commit new batch + /// @param batchNumber Number of the committed batch + /// @param timestamp Unix timestamp denoting the start of the batch execution /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more /// @param newStateRoot The state root of the full state tree /// @param numberOfLayer1Txs Number of priority operations to be processed - /// @param priorityOperationsHash Hash of all priority operations from this block + /// @param priorityOperationsHash Hash of all priority operations from this batch /// @param bootloaderHeapInitialContentsHash Hash of the initial contents of the bootloader heap. In practice it serves as the commitment to the transactions in the batch. /// @param eventsQueueStateHash Hash of the events queue state. In practice it serves as the commitment to the events in the batch. 
- /// @param systemLogs concatenation of all L2 -> L1 system logs in the block + /// @param systemLogs concatenation of all L2 -> L1 system logs in the batch /// @param totalL2ToL1Pubdata Total pubdata committed to as part of bootloader run. Contents are: l2Tol1Logs <> l2Tol1Messages <> publishedBytecodes <> stateDiffs - struct CommitBlockInfo { - uint64 blockNumber; + struct CommitBatchInfo { + uint64 batchNumber; uint64 timestamp; uint64 indexRepeatedStorageChanges; bytes32 newStateRoot; @@ -76,28 +76,32 @@ interface IExecutor is IBase { uint256[] serializedProof; } - function commitBlocks(StoredBlockInfo calldata _lastCommittedBlockData, CommitBlockInfo[] calldata _newBlocksData) + function commitBatches(StoredBatchInfo calldata _lastCommittedBatchData, CommitBatchInfo[] calldata _newBatchesData) external; - function proveBlocks( - StoredBlockInfo calldata _prevBlock, - StoredBlockInfo[] calldata _committedBlocks, + function proveBatches( + StoredBatchInfo calldata _prevBatch, + StoredBatchInfo[] calldata _committedBatches, ProofInput calldata _proof ) external; - function executeBlocks(StoredBlockInfo[] calldata _blocksData) external; + function executeBatches(StoredBatchInfo[] calldata _batchesData) external; - function revertBlocks(uint256 _newLastBlock) external; + function revertBatches(uint256 _newLastBatch) external; - /// @notice Event emitted when a block is committed - event BlockCommit(uint256 indexed blockNumber, bytes32 indexed blockHash, bytes32 indexed commitment); + /// @notice Event emitted when a batch is committed + /// @dev It has the name "BlockCommit" and not "BatchCommit" due to backward compatibility considerations + event BlockCommit(uint256 indexed batchNumber, bytes32 indexed batchHash, bytes32 indexed commitment); - /// @notice Event emitted when blocks are verified - event BlocksVerification(uint256 indexed previousLastVerifiedBlock, uint256 indexed currentLastVerifiedBlock); + /// @notice Event emitted when batches are verified + 
/// @dev It has the name "BlocksVerification" and not "BatchesVerification" due to backward compatibility considerations + event BlocksVerification(uint256 indexed previousLastVerifiedBatch, uint256 indexed currentLastVerifiedBatch); - /// @notice Event emitted when a block is executed - event BlockExecution(uint256 indexed blockNumber, bytes32 indexed blockHash, bytes32 indexed commitment); + /// @notice Event emitted when a batch is executed + /// @dev It has the name "BlockExecution" and not "BatchExecution" due to backward compatibility considerations + event BlockExecution(uint256 indexed batchNumber, bytes32 indexed batchHash, bytes32 indexed commitment); - /// @notice Event emitted when blocks are reverted - event BlocksRevert(uint256 totalBlocksCommitted, uint256 totalBlocksVerified, uint256 totalBlocksExecuted); + /// @notice Event emitted when batches are reverted + /// @dev It has the name "BlocksRevert" and not "BatchesRevert" due to backward compatibility considerations + event BlocksRevert(uint256 totalBatchesCommitted, uint256 totalBatchesVerified, uint256 totalBatchesExecuted); } diff --git a/ethereum/contracts/zksync/interfaces/IGetters.sol b/ethereum/contracts/zksync/interfaces/IGetters.sol index 0860bcff3a..f88c3b9d7b 100644 --- a/ethereum/contracts/zksync/interfaces/IGetters.sol +++ b/ethereum/contracts/zksync/interfaces/IGetters.sol @@ -17,11 +17,11 @@ interface IGetters is IBase { function getPendingGovernor() external view returns (address); - function getTotalBlocksCommitted() external view returns (uint256); + function getTotalBatchesCommitted() external view returns (uint256); - function getTotalBlocksVerified() external view returns (uint256); + function getTotalBatchesVerified() external view returns (uint256); - function getTotalBlocksExecuted() external view returns (uint256); + function getTotalBatchesExecuted() external view returns (uint256); function getTotalPriorityTxs() external view returns (uint256); @@ -33,9 +33,9 @@ interface 
IGetters is IBase { function isValidator(address _address) external view returns (bool); - function l2LogsRootHash(uint256 _blockNumber) external view returns (bytes32 hash); + function l2LogsRootHash(uint256 _batchNumber) external view returns (bytes32 hash); - function storedBlockHash(uint256 _blockNumber) external view returns (bytes32); + function storedBatchHash(uint256 _batchNumber) external view returns (bytes32); function getL2BootloaderBytecodeHash() external view returns (bytes32); @@ -59,7 +59,7 @@ interface IGetters is IBase { function getL2SystemContractsUpgradeTxHash() external view returns (bytes32); - function getL2SystemContractsUpgradeBlockNumber() external view returns (uint256); + function getL2SystemContractsUpgradeBatchNumber() external view returns (uint256); function isApprovedBySecurityCouncil() external view returns (bool); @@ -67,7 +67,7 @@ interface IGetters is IBase { function getAllowList() external view returns (address); - function isEthWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool); + function isEthWithdrawalFinalized(uint256 _l2BatchNumber, uint256 _l2MessageIndex) external view returns (bool); /*////////////////////////////////////////////////////////////// DIAMOND LOUPE diff --git a/ethereum/contracts/zksync/interfaces/ILegacyGetters.sol b/ethereum/contracts/zksync/interfaces/ILegacyGetters.sol new file mode 100644 index 0000000000..6fb4c87156 --- /dev/null +++ b/ethereum/contracts/zksync/interfaces/ILegacyGetters.sol @@ -0,0 +1,21 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.13; + +import "../libraries/PriorityQueue.sol"; +import "./IBase.sol"; + +/// @author Matter Labs +/// @dev This interface contains getters for the zkSync contract that should not be used, +/// but still are kept for backward compatibility. 
+interface ILegacyGetters is IBase { + function getTotalBlocksCommitted() external view returns (uint256); + + function getTotalBlocksVerified() external view returns (uint256); + + function getTotalBlocksExecuted() external view returns (uint256); + + function storedBlockHash(uint256 _batchNumber) external view returns (bytes32); + + function getL2SystemContractsUpgradeBlockNumber() external view returns (uint256); +} diff --git a/ethereum/contracts/zksync/interfaces/IMailbox.sol b/ethereum/contracts/zksync/interfaces/IMailbox.sol index 6d37d080af..a4ec1d2336 100644 --- a/ethereum/contracts/zksync/interfaces/IMailbox.sol +++ b/ethereum/contracts/zksync/interfaces/IMailbox.sol @@ -21,8 +21,8 @@ interface IMailbox is IBase { /// @param to The recipient's address. `uint256` type for possible address format changes and maintaining backward compatibility /// @param gasLimit The L2 gas limit for L2 transaction. Analog to the `gasLimit` on an L1 transactions /// @param gasPerPubdataByteLimit Maximum number of L2 gas that will cost one byte of pubdata (every piece of data that will be stored on L1 as calldata) - /// @param maxFeePerGas The absolute maximum sender willing to pay per unit of L2 gas to get the transaction included in a block. Analog to the EIP-1559 `maxFeePerGas` on an L1 transactions - /// @param maxPriorityFeePerGas The additional fee that is paid directly to the validator to incentivize them to include the transaction in a block. Analog to the EIP-1559 `maxPriorityFeePerGas` on an L1 transactions + /// @param maxFeePerGas The absolute maximum sender willing to pay per unit of L2 gas to get the transaction included in a batch. Analog to the EIP-1559 `maxFeePerGas` on an L1 transactions + /// @param maxPriorityFeePerGas The additional fee that is paid directly to the validator to incentivize them to include the transaction in a batch. 
Analog to the EIP-1559 `maxPriorityFeePerGas` on an L1 transactions /// @param paymaster The address of the EIP-4337 paymaster, that will pay fees for the transaction. `uint256` type for possible address format changes and maintaining backward compatibility /// @param nonce The nonce of the transaction. For L1->L2 transactions it is the priority operation Id. /// @param value The value to pass with the transaction @@ -87,14 +87,14 @@ interface IMailbox is IBase { } function proveL2MessageInclusion( - uint256 _blockNumber, + uint256 _l2BatchNumber, uint256 _index, L2Message calldata _message, bytes32[] calldata _proof ) external view returns (bool); function proveL2LogInclusion( - uint256 _blockNumber, + uint256 _l2BatchNumber, uint256 _index, L2Log memory _log, bytes32[] calldata _proof @@ -102,17 +102,17 @@ interface IMailbox is IBase { function proveL1ToL2TransactionStatus( bytes32 _l2TxHash, - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes32[] calldata _merkleProof, TxStatus _status ) external view returns (bool); function finalizeEthWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external; diff --git a/ethereum/contracts/zksync/libraries/TransactionValidator.sol b/ethereum/contracts/zksync/libraries/TransactionValidator.sol index a4742cd31d..69310d91c3 100644 --- a/ethereum/contracts/zksync/libraries/TransactionValidator.sol +++ b/ethereum/contracts/zksync/libraries/TransactionValidator.sol @@ -100,9 +100,9 @@ library TransactionValidator { return costForComputation + costForPubdata; } - /// @notice Based on the full L2 gas limit (that includes the block overhead) and other + /// @notice Based on the full L2 gas limit (that includes the batch overhead) and other /// properties of the transaction, returns the 
l2GasLimit for the body of the transaction (the actual execution). - /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the block + /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the batch /// and the L2 gas needed to process the transaction itself (i.e. the actual l2GasLimit that will be used for the transaction). /// @param _gasPricePerPubdata The L2 gas price for each byte of pubdata. /// @param _encodingLength The length of the ABI-encoding of the transaction. @@ -121,11 +121,11 @@ library TransactionValidator { } /// @notice Based on the total L2 gas limit and several other parameters of the transaction - /// returns the part of the L2 gas that will be spent on the block's overhead. + /// returns the part of the L2 gas that will be spent on the batch's overhead. /// @dev The details of how this function works can be checked in the documentation /// of the fee model of zkSync. The appropriate comments are also present /// in the Rust implementation description of function `get_maximal_allowed_overhead`. - /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the block + /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the batch /// and the L2 gas needed to process the transaction itself (i.e. the actual gasLimit that will be used for the transaction). /// @param _gasPricePerPubdata The maximum amount of L2 gas that the operator may charge the user for a single byte of pubdata. 
/// @param _encodingLength The length of the binary encoding of the transaction in bytes @@ -133,36 +133,36 @@ library TransactionValidator { uint256 _totalGasLimit, uint256 _gasPricePerPubdata, uint256 _encodingLength - ) internal pure returns (uint256 blockOverheadForTransaction) { - uint256 blockOverheadGas = BLOCK_OVERHEAD_L2_GAS + BLOCK_OVERHEAD_PUBDATA * _gasPricePerPubdata; + ) internal pure returns (uint256 batchOverheadForTransaction) { + uint256 batchOverheadGas = BATCH_OVERHEAD_L2_GAS + BATCH_OVERHEAD_PUBDATA * _gasPricePerPubdata; // The overhead from taking up the transaction's slot - uint256 txSlotOverhead = Math.ceilDiv(blockOverheadGas, MAX_TRANSACTIONS_IN_BLOCK); - blockOverheadForTransaction = Math.max(blockOverheadForTransaction, txSlotOverhead); + uint256 txSlotOverhead = Math.ceilDiv(batchOverheadGas, MAX_TRANSACTIONS_IN_BATCH); + batchOverheadForTransaction = Math.max(batchOverheadForTransaction, txSlotOverhead); // The overhead for occupying the bootloader memory can be derived from encoded_len - uint256 overheadForLength = Math.ceilDiv(_encodingLength * blockOverheadGas, BOOTLOADER_TX_ENCODING_SPACE); - blockOverheadForTransaction = Math.max(blockOverheadForTransaction, overheadForLength); + uint256 overheadForLength = Math.ceilDiv(_encodingLength * batchOverheadGas, BOOTLOADER_TX_ENCODING_SPACE); + batchOverheadForTransaction = Math.max(batchOverheadForTransaction, overheadForLength); // The overhead for possible published public data // TODO: possibly charge a separate fee for possible pubdata spending // uint256 overheadForPublicData; // { - // uint256 numerator = (blockOverheadGas * _totalGasLimit + _gasPricePerPubdata * MAX_PUBDATA_PER_BLOCK); - // uint256 denominator = (_gasPricePerPubdata * MAX_PUBDATA_PER_BLOCK + blockOverheadGas); + // uint256 numerator = (batchOverheadGas * _totalGasLimit + _gasPricePerPubdata * MAX_PUBDATA_PER_BATCH); + // uint256 denominator = (_gasPricePerPubdata * MAX_PUBDATA_PER_BATCH + batchOverheadGas); // 
overheadForPublicData = (numerator - 1) / denominator; // } - // blockOverheadForTransaction = Math.max(blockOverheadForTransaction, overheadForPublicData); + // batchOverheadForTransaction = Math.max(batchOverheadForTransaction, overheadForPublicData); // The overhead for ergs that could be used to use single-instance circuits uint256 overheadForGas; { - uint256 numerator = blockOverheadGas * _totalGasLimit + L2_TX_MAX_GAS_LIMIT; - uint256 denominator = L2_TX_MAX_GAS_LIMIT + blockOverheadGas; + uint256 numerator = batchOverheadGas * _totalGasLimit + L2_TX_MAX_GAS_LIMIT; + uint256 denominator = L2_TX_MAX_GAS_LIMIT + batchOverheadGas; overheadForGas = (numerator - 1) / denominator; } - blockOverheadForTransaction = Math.max(blockOverheadForTransaction, overheadForGas); + batchOverheadForTransaction = Math.max(batchOverheadForTransaction, overheadForGas); } } diff --git a/ethereum/scripts/utils.ts b/ethereum/scripts/utils.ts index 27634376e2..3b7c2a68da 100644 --- a/ethereum/scripts/utils.ts +++ b/ethereum/scripts/utils.ts @@ -74,7 +74,7 @@ export function applyL1ToL2Alias(address: string): string { return ethers.utils.hexlify(ethers.BigNumber.from(address).add(L1_TO_L2_ALIAS_OFFSET).mod(ADDRESS_MODULO)); } -export function readBlockBootloaderBytecode() { +export function readBatchBootloaderBytecode() { const bootloaderPath = path.join(process.env.ZKSYNC_HOME as string, `etc/system-contracts/bootloader`); return fs.readFileSync(`${bootloaderPath}/build/artifacts/proved_batch.yul/proved_batch.yul.zbin`); } diff --git a/ethereum/src.ts/deploy.ts b/ethereum/src.ts/deploy.ts index 86a0b51159..53872bf311 100644 --- a/ethereum/src.ts/deploy.ts +++ b/ethereum/src.ts/deploy.ts @@ -17,12 +17,12 @@ import { getAddressFromEnv, getHashFromEnv, getNumberFromEnv, - readBlockBootloaderBytecode, + readBatchBootloaderBytecode, getTokens } from '../scripts/utils'; import { deployViaCreate2 } from './deploy-utils'; -const L2_BOOTLOADER_BYTECODE_HASH = 
hexlify(hashL2Bytecode(readBlockBootloaderBytecode())); +const L2_BOOTLOADER_BYTECODE_HASH = hexlify(hashL2Bytecode(readBatchBootloaderBytecode())); const L2_DEFAULT_ACCOUNT_BYTECODE_HASH = hexlify(hashL2Bytecode(readSystemContractsBytecode('DefaultAccount'))); export interface DeployedAddresses { @@ -104,9 +104,9 @@ export class Deployer { this.addresses.ZkSync.GovernanceFacet ) ); - const genesisBlockHash = getHashFromEnv('CONTRACTS_GENESIS_ROOT'); // TODO: confusing name + const genesisBatchHash = getHashFromEnv('CONTRACTS_GENESIS_ROOT'); // TODO: confusing name const genesisRollupLeafIndex = getNumberFromEnv('CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX'); - const genesisBlockCommitment = getHashFromEnv('CONTRACTS_GENESIS_BLOCK_COMMITMENT'); + const genesisBatchCommitment = getHashFromEnv('CONTRACTS_GENESIS_BATCH_COMMITMENT'); const verifierParams = { recursionNodeLevelVkHash: getHashFromEnv('CONTRACTS_RECURSION_NODE_LEVEL_VK_HASH'), recursionLeafLevelVkHash: getHashFromEnv('CONTRACTS_RECURSION_LEAF_LEVEL_VK_HASH'), @@ -118,9 +118,9 @@ export class Deployer { const diamondInitCalldata = DiamondInit.encodeFunctionData('initialize', [ this.addresses.ZkSync.Verifier, this.governorAddress, - genesisBlockHash, + genesisBatchHash, genesisRollupLeafIndex, - genesisBlockCommitment, + genesisBatchCommitment, this.addresses.AllowList, verifierParams, false, // isPorterAvailable diff --git a/ethereum/test/unit_tests/executor_test.spec.ts b/ethereum/test/unit_tests/executor_test.spec.ts index aac399ab04..98ee95596c 100644 --- a/ethereum/test/unit_tests/executor_test.spec.ts +++ b/ethereum/test/unit_tests/executor_test.spec.ts @@ -22,9 +22,9 @@ import { SYSTEM_LOG_KEYS, constructL2Log, createSystemLogs, - genesisStoredBlockInfo, + genesisStoredBatchInfo, getCallRevertReason, - packBatchTimestampAndBlockTimestamp, + packBatchTimestampAndBatchTimestamp, requestExecute } from './utils'; @@ -36,11 +36,11 @@ describe(`Executor tests`, function () { let executor: ExecutorFacet; let 
getters: GettersFacet; let mailbox: MailboxFacet; - let newCommitedBlockBlockHash: any; - let newCommitedBlockCommitment: any; + let newCommitedBatchHash: any; + let newCommitedBatchCommitment: any; let currentTimestamp: number; - let newCommitBlockInfo: any; - let newStoredBlockInfo: any; + let newCommitBatchInfo: any; + let newStoredBatchInfo: any; let logs: any; const proofInput = { @@ -120,9 +120,9 @@ describe(`Executor tests`, function () { }); describe(`Authorization check`, function () { - const storedBlockInfo = { - blockNumber: 0, - blockHash: ethers.utils.randomBytes(32), + const storedBatchInfo = { + batchNumber: 0, + batchHash: ethers.utils.randomBytes(32), indexRepeatedStorageChanges: 0, numberOfLayer1Txs: 0, priorityOperationsHash: ethers.utils.randomBytes(32), @@ -130,8 +130,8 @@ describe(`Executor tests`, function () { timestamp: 0, commitment: ethers.utils.randomBytes(32) }; - const commitBlockInfo = { - blockNumber: 0, + const commitBatchInfo = { + batchNumber: 0, timestamp: 0, indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), @@ -145,21 +145,21 @@ describe(`Executor tests`, function () { it(`Should revert on committing by unauthorised address`, async () => { const revertReason = await getCallRevertReason( - executor.connect(randomSigner).commitBlocks(storedBlockInfo, [commitBlockInfo]) + executor.connect(randomSigner).commitBatches(storedBatchInfo, [commitBatchInfo]) ); expect(revertReason).equal(`1h`); }); it(`Should revert on proving by unauthorised address`, async () => { const revertReason = await getCallRevertReason( - executor.connect(randomSigner).proveBlocks(storedBlockInfo, [storedBlockInfo], proofInput) + executor.connect(randomSigner).proveBatches(storedBatchInfo, [storedBatchInfo], proofInput) ); expect(revertReason).equal(`1h`); }); it(`Should revert on executing by unauthorised address`, async () => { const revertReason = await getCallRevertReason( - 
executor.connect(randomSigner).executeBlocks([storedBlockInfo]) + executor.connect(randomSigner).executeBatches([storedBatchInfo]) ); expect(revertReason).equal(`1h`); }); @@ -169,8 +169,8 @@ describe(`Executor tests`, function () { before(async () => { currentTimestamp = (await hardhat.ethers.providers.getDefaultProvider().getBlock(`latest`)).timestamp; logs = ethers.utils.hexConcat([`0x00000007`].concat(createSystemLogs())); - newCommitBlockInfo = { - blockNumber: 1, + newCommitBatchInfo = { + batchNumber: 1, timestamp: currentTimestamp, indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), @@ -183,102 +183,102 @@ describe(`Executor tests`, function () { }; }); - it(`Should revert on committing with wrong last committed block data`, async () => { - const wrongGenesisStoredBlockInfo = Object.assign({}, genesisStoredBlockInfo()); - wrongGenesisStoredBlockInfo.timestamp = 1000; // wrong timestamp + it(`Should revert on committing with wrong last committed batch data`, async () => { + const wrongGenesisStoredBatchInfo = Object.assign({}, genesisStoredBatchInfo()); + wrongGenesisStoredBatchInfo.timestamp = 1000; // wrong timestamp const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(wrongGenesisStoredBlockInfo, [newCommitBlockInfo]) + executor.connect(validator).commitBatches(wrongGenesisStoredBatchInfo, [newCommitBatchInfo]) ); expect(revertReason).equal(`i`); }); - it(`Should revert on committing with wrong order of blocks`, async () => { - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.blockNumber = 2; //wrong block number + it(`Should revert on committing with wrong order of batches`, async () => { + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.batchNumber = 2; //wrong batch number const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), 
[wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`f`); }); - it(`Should revert on committing with wrong new block timestamp`, async () => { - const wrongNewBlockTimestamp = ethers.utils.hexValue(ethers.utils.randomBytes(32)); // correct value is 0 + it(`Should revert on committing with wrong new batch timestamp`, async () => { + const wrongNewBatchTimestamp = ethers.utils.hexValue(ethers.utils.randomBytes(32)); // correct value is 0 var wrongL2Logs = createSystemLogs(); wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - wrongNewBlockTimestamp.toString() + wrongNewBatchTimestamp.toString() ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`tb`); }); - it(`Should revert on committing with too small new block timestamp`, async () => { - const wrongNewBlockTimestamp = 1; // too small + it(`Should revert on committing with too small new batch timestamp`, async () => { + const wrongNewBatchTimestamp = 1; // too small var wrongL2Logs = createSystemLogs(); wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, ethers.utils.hexlify( - 
packBatchTimestampAndBlockTimestamp(wrongNewBlockTimestamp, wrongNewBlockTimestamp) + packBatchTimestampAndBatchTimestamp(wrongNewBatchTimestamp, wrongNewBatchTimestamp) ) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); - wrongNewCommitBlockInfo.timestamp = wrongNewBlockTimestamp; + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + wrongNewCommitBatchInfo.timestamp = wrongNewBatchTimestamp; const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`h1`); }); it(`Should revert on committing with too big last L2 block timestamp`, async () => { - const wrongNewBlockTimestamp = `0xffffffff`; // too big + const wrongNewBatchTimestamp = `0xffffffff`; // too big var wrongL2Logs = createSystemLogs(); wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(wrongNewBlockTimestamp, wrongNewBlockTimestamp) + packBatchTimestampAndBatchTimestamp(wrongNewBatchTimestamp, wrongNewBatchTimestamp) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); - wrongNewCommitBlockInfo.timestamp = parseInt(wrongNewBlockTimestamp); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + wrongNewCommitBatchInfo.timestamp = 
parseInt(wrongNewBatchTimestamp); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`h2`); }); - it(`Should revert on committing with wrong previous blockhash`, async () => { - const wrongPreviousBlockHash = ethers.utils.randomBytes(32); // correct value is bytes32(0) + it(`Should revert on committing with wrong previous batchhash`, async () => { + const wrongPreviousBatchHash = ethers.utils.randomBytes(32); // correct value is bytes32(0) var wrongL2Logs = createSystemLogs(); - wrongL2Logs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + wrongL2Logs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(wrongPreviousBlockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + ethers.utils.hexlify(wrongPreviousBatchHash) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`l`); }); @@ -287,11 +287,11 @@ describe(`Executor tests`, function () { var wrongL2Logs = createSystemLogs(); delete wrongL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY]; - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = 
ethers.utils.hexConcat([`0x00000006`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000006`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`b7`); }); @@ -307,11 +307,11 @@ describe(`Executor tests`, function () { ) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`kp`); }); @@ -327,11 +327,11 @@ describe(`Executor tests`, function () { ethers.constants.HashZero ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`sc`); }); @@ -346,11 +346,11 @@ describe(`Executor tests`, function () { 
ethers.utils.hexlify(wrongChainedPriorityHash) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`t`); }); @@ -364,12 +364,12 @@ describe(`Executor tests`, function () { ethers.utils.hexlify(0x01) ); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); - wrongNewCommitBlockInfo.numberOfLayer1Txs = 2; // wrong number + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + wrongNewCommitBatchInfo.numberOfLayer1Txs = 2; // wrong number const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`ta`); }); @@ -378,11 +378,11 @@ describe(`Executor tests`, function () { var wrongL2Logs = createSystemLogs(); wrongL2Logs.push(constructL2Log(true, L2_SYSTEM_CONTEXT_ADDRESS, 119, ethers.constants.HashZero)); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + 
wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000008`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(`ul`); }); @@ -403,11 +403,11 @@ describe(`Executor tests`, function () { var wrong_addr = ethers.utils.hexlify(ethers.utils.randomBytes(20)); wrongL2Logs[i] = constructL2Log(true, wrong_addr, i, tests[i][0]); - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(wrongL2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal(tests[i][1]); } @@ -418,48 +418,48 @@ describe(`Executor tests`, function () { var l2Logs = createSystemLogs(); delete l2Logs[i]; - const wrongNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - wrongNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000006`].concat(l2Logs)); + const wrongNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + wrongNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000006`].concat(l2Logs)); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [wrongNewCommitBlockInfo]) + executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [wrongNewCommitBatchInfo]) ); expect(revertReason).equal('b7'); } }); - it(`Should successfully commit a block`, async () => { + 
it(`Should successfully commit a batch`, async () => { var correctL2Logs = createSystemLogs(); correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + packBatchTimestampAndBatchTimestamp(currentTimestamp, currentTimestamp) ); - const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); const commitTx = await executor .connect(validator) - .commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + .commitBatches(genesisStoredBatchInfo(), [correctNewCommitBatchInfo]); const result = await commitTx.wait(); - newCommitedBlockBlockHash = result.events[0].args.blockHash; - newCommitedBlockCommitment = result.events[0].args.commitment; + newCommitedBatchHash = result.events[0].args.batchHash; + newCommitedBatchCommitment = result.events[0].args.commitment; - expect(await getters.getTotalBlocksCommitted()).equal(1); + expect(await getters.getTotalBatchesCommitted()).equal(1); }); }); describe(`Proving functionality`, async function () { before(async () => { // Reusing the old timestamp - currentTimestamp = newCommitBlockInfo.timestamp; + currentTimestamp = newCommitBatchInfo.timestamp; - newCommitBlockInfo = { - blockNumber: 1, + newCommitBatchInfo = { + batchNumber: 1, timestamp: currentTimestamp, indexRepeatedStorageChanges: 0, newStateRoot: ethers.utils.randomBytes(32), @@ -471,43 +471,45 @@ describe(`Executor tests`, function () { totalL2ToL1Pubdata: ethers.constants.HashZero }; - newStoredBlockInfo = { - blockNumber: 1, - blockHash: newCommitedBlockBlockHash, + 
newStoredBatchInfo = { + batchNumber: 1, + batchHash: newCommitedBatchHash, indexRepeatedStorageChanges: 0, numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, l2LogsTreeRoot: ethers.constants.HashZero, timestamp: currentTimestamp, - commitment: newCommitedBlockCommitment + commitment: newCommitedBatchCommitment }; }); - it(`Should revert on proving with wrong previous block data`, async () => { - const wrongPreviousStoredBlockInfo = Object.assign({}, genesisStoredBlockInfo()); - wrongPreviousStoredBlockInfo.blockNumber = 10; // Correct is 0 + it(`Should revert on proving with wrong previous batch data`, async () => { + const wrongPreviousStoredBatchInfo = Object.assign({}, genesisStoredBatchInfo()); + wrongPreviousStoredBatchInfo.batchNumber = 10; // Correct is 0 const revertReason = await getCallRevertReason( - executor.connect(validator).proveBlocks(wrongPreviousStoredBlockInfo, [newStoredBlockInfo], proofInput) + executor.connect(validator).proveBatches(wrongPreviousStoredBatchInfo, [newStoredBatchInfo], proofInput) ); expect(revertReason).equal(`t1`); }); - it(`Should revert on proving with wrong committed block`, async () => { - const wrongNewStoredBlockInfo = Object.assign({}, newStoredBlockInfo); - wrongNewStoredBlockInfo.blockNumber = 10; // Correct is 1 + it(`Should revert on proving with wrong committed batch`, async () => { + const wrongNewStoredBatchInfo = Object.assign({}, newStoredBatchInfo); + wrongNewStoredBatchInfo.batchNumber = 10; // Correct is 1 const revertReason = await getCallRevertReason( - executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [wrongNewStoredBlockInfo], proofInput) + executor + .connect(validator) + .proveBatches(genesisStoredBatchInfo(), [wrongNewStoredBatchInfo], proofInput) ); expect(revertReason).equal(`o1`); }); - it(`Should not allow proving a reverted block without commiting again`, async () => { - await executor.connect(validator).revertBlocks(0); + it(`Should not allow proving a reverted 
batch without commiting again`, async () => { + await executor.connect(validator).revertBatches(0); const revertReason = await getCallRevertReason( - executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [newStoredBlockInfo], proofInput) + executor.connect(validator).proveBatches(genesisStoredBatchInfo(), [newStoredBatchInfo], proofInput) ); expect(revertReason).equal(`q`); }); @@ -518,59 +520,59 @@ describe(`Executor tests`, function () { true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + packBatchTimestampAndBatchTimestamp(currentTimestamp, currentTimestamp) ); - const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); var commitTx = await executor .connect(validator) - .commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + .commitBatches(genesisStoredBatchInfo(), [correctNewCommitBatchInfo]); var result = await commitTx.wait(); - newStoredBlockInfo.blockHash = result.events[0].args.blockHash; - newStoredBlockInfo.commitment = result.events[0].args.commitment; + newStoredBatchInfo.batchHash = result.events[0].args.batchHash; + newStoredBatchInfo.commitment = result.events[0].args.commitment; - await executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [newStoredBlockInfo], proofInput); - expect(await getters.getTotalBlocksVerified()).equal(1); + await executor.connect(validator).proveBatches(genesisStoredBatchInfo(), [newStoredBatchInfo], proofInput); + expect(await getters.getTotalBatchesVerified()).equal(1); }); }); - describe(`Reverting blocks functionality`, async function () { - it(`Should not allow reverting 
more blocks than already committed`, async () => { - const revertReason = await getCallRevertReason(executor.connect(validator).revertBlocks(10)); + describe(`Reverting batches functionality`, async function () { + it(`Should not allow reverting more batches than already committed`, async () => { + const revertReason = await getCallRevertReason(executor.connect(validator).revertBatches(10)); expect(revertReason).equal(`v1`); }); }); describe(`Executing functionality`, async function () { - it(`Should revert on executing a block with wrong block number`, async () => { - const wrongNewStoredBlockInfo = Object.assign({}, newStoredBlockInfo); - wrongNewStoredBlockInfo.blockNumber = 10; // correct is 1 + it(`Should revert on executing a batch with wrong batch number`, async () => { + const wrongNewStoredBatchInfo = Object.assign({}, newStoredBatchInfo); + wrongNewStoredBatchInfo.batchNumber = 10; // correct is 1 const revertReason = await getCallRevertReason( - executor.connect(validator).executeBlocks([wrongNewStoredBlockInfo]) + executor.connect(validator).executeBatches([wrongNewStoredBatchInfo]) ); expect(revertReason).equal(`k`); }); - it(`Should revert on executing a block with wrong data`, async () => { - const wrongNewStoredBlockInfo = Object.assign({}, newStoredBlockInfo); - wrongNewStoredBlockInfo.timestamp = 0; // incorrect data + it(`Should revert on executing a batch with wrong data`, async () => { + const wrongNewStoredBatchInfo = Object.assign({}, newStoredBatchInfo); + wrongNewStoredBatchInfo.timestamp = 0; // incorrect data const revertReason = await getCallRevertReason( - executor.connect(validator).executeBlocks([wrongNewStoredBlockInfo]) + executor.connect(validator).executeBatches([wrongNewStoredBatchInfo]) ); expect(revertReason).equal(`exe10`); }); - it(`Should revert on executing a reverted block without committing and proving again`, async () => { - await executor.connect(validator).revertBlocks(0); + it(`Should revert on executing a reverted 
batch without committing and proving again`, async () => { + await executor.connect(validator).revertBatches(0); const revertReason = await getCallRevertReason( - executor.connect(validator).executeBlocks([newStoredBlockInfo]) + executor.connect(validator).executeBatches([newStoredBatchInfo]) ); expect(revertReason).equal(`n`); }); @@ -586,7 +588,7 @@ describe(`Executor tests`, function () { true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + packBatchTimestampAndBatchTimestamp(currentTimestamp, currentTimestamp) ); correctL2Logs[SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY] = constructL2Log( true, @@ -601,34 +603,34 @@ describe(`Executor tests`, function () { '0x01' ); - const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); - correctNewCommitBlockInfo.priorityOperationsHash = chainedPriorityTxHash; - correctNewCommitBlockInfo.numberOfLayer1Txs = 1; + correctNewCommitBatchInfo.priorityOperationsHash = chainedPriorityTxHash; + correctNewCommitBatchInfo.numberOfLayer1Txs = 1; const commitTx = await executor .connect(validator) - .commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + .commitBatches(genesisStoredBatchInfo(), [correctNewCommitBatchInfo]); const result = await commitTx.wait(); - const correctNewStoredBlockInfo = Object.assign({}, newStoredBlockInfo); - correctNewStoredBlockInfo.blockHash = result.events[0].args.blockHash; - correctNewStoredBlockInfo.numberOfLayer1Txs = 1; - correctNewStoredBlockInfo.priorityOperationsHash = chainedPriorityTxHash; - correctNewStoredBlockInfo.commitment = result.events[0].args.commitment; + const 
correctNewStoredBatchInfo = Object.assign({}, newStoredBatchInfo); + correctNewStoredBatchInfo.batchHash = result.events[0].args.batchHash; + correctNewStoredBatchInfo.numberOfLayer1Txs = 1; + correctNewStoredBatchInfo.priorityOperationsHash = chainedPriorityTxHash; + correctNewStoredBatchInfo.commitment = result.events[0].args.commitment; await executor .connect(validator) - .proveBlocks(genesisStoredBlockInfo(), [correctNewStoredBlockInfo], proofInput); + .proveBatches(genesisStoredBatchInfo(), [correctNewStoredBatchInfo], proofInput); const revertReason = await getCallRevertReason( - executor.connect(validator).executeBlocks([correctNewStoredBlockInfo]) + executor.connect(validator).executeBatches([correctNewStoredBatchInfo]) ); expect(revertReason).equal(`s`); - await executor.connect(validator).revertBlocks(0); + await executor.connect(validator).revertBatches(0); }); it(`Should revert on executing with unmatched prioirty operation hash`, async () => { @@ -642,7 +644,7 @@ describe(`Executor tests`, function () { true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + packBatchTimestampAndBatchTimestamp(currentTimestamp, currentTimestamp) ); correctL2Logs[SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY] = constructL2Log( true, @@ -657,26 +659,26 @@ describe(`Executor tests`, function () { '0x01' ); - const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); - correctNewCommitBlockInfo.priorityOperationsHash = chainedPriorityTxHash; - correctNewCommitBlockInfo.numberOfLayer1Txs = 1; + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + correctNewCommitBatchInfo.priorityOperationsHash = chainedPriorityTxHash; + 
correctNewCommitBatchInfo.numberOfLayer1Txs = 1; const commitTx = await executor .connect(validator) - .commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); + .commitBatches(genesisStoredBatchInfo(), [correctNewCommitBatchInfo]); const result = await commitTx.wait(); - const correctNewStoredBlockInfo = Object.assign({}, newStoredBlockInfo); - correctNewStoredBlockInfo.blockHash = result.events[0].args.blockHash; - correctNewStoredBlockInfo.numberOfLayer1Txs = 1; - correctNewStoredBlockInfo.priorityOperationsHash = chainedPriorityTxHash; - correctNewStoredBlockInfo.commitment = result.events[0].args.commitment; + const correctNewStoredBatchInfo = Object.assign({}, newStoredBatchInfo); + correctNewStoredBatchInfo.batchHash = result.events[0].args.batchHash; + correctNewStoredBatchInfo.numberOfLayer1Txs = 1; + correctNewStoredBatchInfo.priorityOperationsHash = chainedPriorityTxHash; + correctNewStoredBatchInfo.commitment = result.events[0].args.commitment; await executor .connect(validator) - .proveBlocks(genesisStoredBlockInfo(), [correctNewStoredBlockInfo], proofInput); + .proveBatches(genesisStoredBatchInfo(), [correctNewStoredBatchInfo], proofInput); await requestExecute( mailbox, @@ -689,14 +691,14 @@ describe(`Executor tests`, function () { ); const revertReason = await getCallRevertReason( - executor.connect(validator).executeBlocks([correctNewStoredBlockInfo]) + executor.connect(validator).executeBatches([correctNewStoredBatchInfo]) ); expect(revertReason).equal(`x`); - await executor.connect(validator).revertBlocks(0); + await executor.connect(validator).revertBatches(0); }); - it(`Should fail to commit block with wrong previous blockhash`, async () => { + it(`Should fail to commit batch with wrong previous batchhash`, async () => { const correctL2Logs = ethers.utils.hexConcat([ `0x00000001`, `0x00000000`, @@ -705,35 +707,35 @@ describe(`Executor tests`, function () { ethers.constants.HashZero ]); - const correctNewCommitBlockInfo = 
Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.l2Logs = correctL2Logs; + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.l2Logs = correctL2Logs; - const block = genesisStoredBlockInfo(); - block.blockHash = '0x' + '1'.repeat(64); + const batch = genesisStoredBatchInfo(); + batch.batchHash = '0x' + '1'.repeat(64); const revertReason = await getCallRevertReason( - executor.connect(validator).commitBlocks(block, [correctNewCommitBlockInfo]) + executor.connect(validator).commitBatches(batch, [correctNewCommitBatchInfo]) ); expect(revertReason).to.equal('i'); }); - it(`Should execute a block successfully`, async () => { + it(`Should execute a batch successfully`, async () => { var correctL2Logs = createSystemLogs(); correctL2Logs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(currentTimestamp, currentTimestamp) + packBatchTimestampAndBatchTimestamp(currentTimestamp, currentTimestamp) ); - const correctNewCommitBlockInfo = Object.assign({}, newCommitBlockInfo); - correctNewCommitBlockInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); + const correctNewCommitBatchInfo = Object.assign({}, newCommitBatchInfo); + correctNewCommitBatchInfo.systemLogs = ethers.utils.hexConcat([`0x00000007`].concat(correctL2Logs)); - await executor.connect(validator).commitBlocks(genesisStoredBlockInfo(), [correctNewCommitBlockInfo]); - await executor.connect(validator).proveBlocks(genesisStoredBlockInfo(), [newStoredBlockInfo], proofInput); - await executor.connect(validator).executeBlocks([newStoredBlockInfo]); + await executor.connect(validator).commitBatches(genesisStoredBatchInfo(), [correctNewCommitBatchInfo]); + await executor.connect(validator).proveBatches(genesisStoredBatchInfo(), [newStoredBatchInfo], proofInput); + await 
executor.connect(validator).executeBatches([newStoredBatchInfo]); - expect(await getters.getTotalBlocksExecuted()).equal(1); + expect(await getters.getTotalBatchesExecuted()).equal(1); }); }); }); diff --git a/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts b/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts index a3a8f5a3ac..e058436f33 100644 --- a/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts +++ b/ethereum/test/unit_tests/l1_erc20_bridge_test.spec.ts @@ -163,7 +163,7 @@ describe(`L1ERC20Bridge tests`, function () { expect(revertReason).equal(`nt`); }); - it(`Should revert on finalizing a withdrawal with wrong block number`, async () => { + it(`Should revert on finalizing a withdrawal with wrong batch number`, async () => { const functionSignature = `0x11a2ccc1`; const l1Receiver = await randomSigner.getAddress(); const l2ToL1message = ethers.utils.hexConcat([ diff --git a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts index bbb82bb7ec..1c8829201c 100644 --- a/ethereum/test/unit_tests/l2-upgrade.test.spec.ts +++ b/ethereum/test/unit_tests/l2-upgrade.test.spec.ts @@ -19,16 +19,16 @@ import { getCallRevertReason, AccessMode, EMPTY_STRING_KECCAK, - genesisStoredBlockInfo, - StoredBlockInfo, - CommitBlockInfo, + genesisStoredBatchInfo, + StoredBatchInfo, + CommitBatchInfo, L2_SYSTEM_CONTEXT_ADDRESS, L2_BOOTLOADER_ADDRESS, createSystemLogs, SYSTEM_LOG_KEYS, constructL2Log, L2_TO_L1_MESSENGER, - packBatchTimestampAndBlockTimestamp + packBatchTimestampAndBatchTimestamp } from './utils'; import * as ethers from 'ethers'; import { BigNumber, BigNumberish, BytesLike } from 'ethers'; @@ -47,8 +47,8 @@ describe('L2 upgrade test', function () { let diamondProxyContract: ethers.Contract; let owner: ethers.Signer; - let block1Info: CommitBlockInfo; - let storedBlock1Info: StoredBlockInfo; + let batch1Info: CommitBatchInfo; + let storedBatch1Info: StoredBatchInfo; let verifier: string; let verifierParams: 
VerifierParams; @@ -134,12 +134,12 @@ describe('L2 upgrade test', function () { await (await proxyGovernance.setValidator(await owner.getAddress(), true)).wait(); }); - it('Upgrade should work even if not all blocks are processed', async () => { - block1Info = await buildCommitBlockInfo(genesisStoredBlockInfo(), { - blockNumber: 1 + it('Upgrade should work even if not all batches are processed', async () => { + batch1Info = await buildCommitBatchInfo(genesisStoredBatchInfo(), { + batchNumber: 1 }); - const commitReceipt = await (await proxyExecutor.commitBlocks(genesisStoredBlockInfo(), [block1Info])).wait(); + const commitReceipt = await (await proxyExecutor.commitBatches(genesisStoredBatchInfo(), [batch1Info])).wait(); const commitment = commitReceipt.events[0].args.commitment; expect(await proxyGetters.getProtocolVersion()).to.equal(0); @@ -154,9 +154,9 @@ describe('L2 upgrade test', function () { expect(await proxyGetters.getProtocolVersion()).to.equal(1); - storedBlock1Info = getBlockStoredInfo(block1Info, commitment); + storedBatch1Info = getBatchStoredInfo(batch1Info, commitment); - await makeExecutedEqualCommitted(proxyExecutor, genesisStoredBlockInfo(), [storedBlock1Info], []); + await makeExecutedEqualCommitted(proxyExecutor, genesisStoredBatchInfo(), [storedBatch1Info], []); }); it('Timestamp should behave correctly', async () => { @@ -469,16 +469,16 @@ describe('L2 upgrade test', function () { expect(revertReason).to.equal('Previous upgrade has not been finalized'); }); - it('Should require that the next commit blocks contains an upgrade tx', async () => { + it('Should require that the next commit batches contains an upgrade tx', async () => { if (!l2UpgradeTxHash) { throw new Error('Can not perform this test without l2UpgradeTxHash'); } - const block2InfoNoUpgradeTx = await buildCommitBlockInfo(storedBlock1Info, { - blockNumber: 2 + const batch2InfoNoUpgradeTx = await buildCommitBatchInfo(storedBatch1Info, { + batchNumber: 2 }); const revertReason = 
await getCallRevertReason( - proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoNoUpgradeTx]) + proxyExecutor.commitBatches(storedBatch1Info, [batch2InfoNoUpgradeTx]) ); expect(revertReason).to.equal('b8'); }); @@ -505,22 +505,22 @@ describe('L2 upgrade test', function () { l2UpgradeTxHash ) ); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block2InfoNoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch2InfoNoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 2 + batchNumber: 2 }, systemLogs ); const revertReason = await getCallRevertReason( - proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoNoUpgradeTx]) + proxyExecutor.commitBatches(storedBatch1Info, [batch2InfoNoUpgradeTx]) ); expect(revertReason).to.equal('kp'); }); @@ -536,24 +536,24 @@ describe('L2 upgrade test', function () { ethers.constants.HashZero ) ); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch2InfoTwoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 2, + batchNumber: 2, timestamp }, systemLogs ); const revertReason = await getCallRevertReason( - proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx]) + 
proxyExecutor.commitBatches(storedBatch1Info, [batch2InfoTwoUpgradeTx]) ); expect(revertReason).to.equal('ut'); }); @@ -569,29 +569,29 @@ describe('L2 upgrade test', function () { l2UpgradeTxHash ) ); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch2InfoTwoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 2, + batchNumber: 2, timestamp }, systemLogs ); - await (await proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx])).wait(); + await (await proxyExecutor.commitBatches(storedBatch1Info, [batch2InfoTwoUpgradeTx])).wait(); - expect(await proxyGetters.getL2SystemContractsUpgradeBlockNumber()).to.equal(2); + expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(2); }); - it('Should commit successfully when block was reverted and reupgraded', async () => { - await (await proxyExecutor.revertBlocks(1)).wait(); + it('Should commit successfully when batch was reverted and reupgraded', async () => { + await (await proxyExecutor.revertBatches(1)).wait(); const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; const systemLogs = createSystemLogs(); systemLogs.push( @@ -602,36 +602,36 @@ describe('L2 upgrade test', function () { l2UpgradeTxHash ) ); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + 
ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block2InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch2InfoTwoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 2, + batchNumber: 2, timestamp }, systemLogs ); const commitReceipt = await ( - await proxyExecutor.commitBlocks(storedBlock1Info, [block2InfoTwoUpgradeTx]) + await proxyExecutor.commitBatches(storedBatch1Info, [batch2InfoTwoUpgradeTx]) ).wait(); - expect(await proxyGetters.getL2SystemContractsUpgradeBlockNumber()).to.equal(2); + expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(2); const commitment = commitReceipt.events[0].args.commitment; - const newBlockStoredInfo = getBlockStoredInfo(block2InfoTwoUpgradeTx, commitment); - await makeExecutedEqualCommitted(proxyExecutor, storedBlock1Info, [newBlockStoredInfo], []); + const newBatchStoredInfo = getBatchStoredInfo(batch2InfoTwoUpgradeTx, commitment); + await makeExecutedEqualCommitted(proxyExecutor, storedBatch1Info, [newBatchStoredInfo], []); - storedBlock1Info = newBlockStoredInfo; + storedBatch1Info = newBatchStoredInfo; }); it('Should successfully commit a sequential upgrade', async () => { - expect(await proxyGetters.getL2SystemContractsUpgradeBlockNumber()).to.equal(0); + expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(0); await ( await executeTransparentUpgrade(proxyGetters, proxyDiamondCut, { newProtocolVersion: 5, @@ -641,35 +641,35 @@ describe('L2 upgrade test', function () { const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; const systemLogs = createSystemLogs(); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + 
ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block3InfoTwoUpgradeTx = await buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch3InfoTwoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 3, + batchNumber: 3, timestamp }, systemLogs ); const commitReceipt = await ( - await proxyExecutor.commitBlocks(storedBlock1Info, [block3InfoTwoUpgradeTx]) + await proxyExecutor.commitBatches(storedBatch1Info, [batch3InfoTwoUpgradeTx]) ).wait(); const commitment = commitReceipt.events[0].args.commitment; - const newBlockStoredInfo = getBlockStoredInfo(block3InfoTwoUpgradeTx, commitment); + const newBatchStoredInfo = getBatchStoredInfo(batch3InfoTwoUpgradeTx, commitment); - expect(await proxyGetters.getL2SystemContractsUpgradeBlockNumber()).to.equal(0); + expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(0); - await makeExecutedEqualCommitted(proxyExecutor, storedBlock1Info, [newBlockStoredInfo], []); + await makeExecutedEqualCommitted(proxyExecutor, storedBatch1Info, [newBatchStoredInfo], []); - storedBlock1Info = newBlockStoredInfo; + storedBatch1Info = newBatchStoredInfo; - expect(await proxyGetters.getL2SystemContractsUpgradeBlockNumber()).to.equal(0); + expect(await proxyGetters.getL2SystemContractsUpgradeBatchNumber()).to.equal(0); }); it('Should successfully commit custom upgrade', async () => { @@ -695,38 +695,38 @@ describe('L2 upgrade test', function () { const timestamp = (await hardhat.ethers.provider.getBlock('latest')).timestamp; const systemLogs = createSystemLogs(); - systemLogs[SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY] = constructL2Log( + systemLogs[SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, - SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, - ethers.utils.hexlify(storedBlock1Info.blockHash) + SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, + ethers.utils.hexlify(storedBatch1Info.batchHash) ); - const block3InfoTwoUpgradeTx = await 
buildCommitBlockInfoWithCustomLogs( - storedBlock1Info, + const batch3InfoTwoUpgradeTx = await buildCommitBatchInfoWithCustomLogs( + storedBatch1Info, { - blockNumber: 4, + batchNumber: 4, timestamp }, systemLogs ); const commitReceipt = await ( - await proxyExecutor.commitBlocks(storedBlock1Info, [block3InfoTwoUpgradeTx]) + await proxyExecutor.commitBatches(storedBatch1Info, [batch3InfoTwoUpgradeTx]) ).wait(); const commitment = commitReceipt.events[0].args.commitment; - const newBlockStoredInfo = getBlockStoredInfo(block3InfoTwoUpgradeTx, commitment); + const newBatchStoredInfo = getBatchStoredInfo(batch3InfoTwoUpgradeTx, commitment); - await makeExecutedEqualCommitted(proxyExecutor, storedBlock1Info, [newBlockStoredInfo], []); + await makeExecutedEqualCommitted(proxyExecutor, storedBatch1Info, [newBatchStoredInfo], []); - storedBlock1Info = newBlockStoredInfo; + storedBatch1Info = newBatchStoredInfo; expect(upgradeEvents[1].name).to.equal('Test'); }); }); -type CommitBlockInfoWithTimestamp = Partial & { - blockNumber: BigNumberish; +type CommitBatchInfoWithTimestamp = Partial & { + batchNumber: BigNumberish; }; // An actual log should also contain shardId/isService and logIndex, @@ -737,11 +737,11 @@ interface L2ToL1Log { value: string; } -function contextLog(timestamp: number, prevBlockHash: BytesLike): L2ToL1Log { +function contextLog(timestamp: number, prevBatchHash: BytesLike): L2ToL1Log { return { sender: L2_SYSTEM_CONTEXT_ADDRESS, - key: packBatchTimestampAndBlockTimestamp(timestamp, timestamp), - value: ethers.utils.hexlify(prevBlockHash) + key: packBatchTimestampAndBatchTimestamp(timestamp, timestamp), + value: ethers.utils.hexlify(prevBatchHash) }; } @@ -767,17 +767,17 @@ function encodeLogs(logs: L2ToL1Log[]) { return ethers.utils.hexConcat(['0x00000000', joinedLogs]); } -async function buildCommitBlockInfo( - prevInfo: StoredBlockInfo, - info: CommitBlockInfoWithTimestamp -): Promise { +async function buildCommitBatchInfo( + prevInfo: StoredBatchInfo, 
+ info: CommitBatchInfoWithTimestamp +): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock('latest')).timestamp; let systemLogs = createSystemLogs(); systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(timestamp, timestamp) + packBatchTimestampAndBatchTimestamp(timestamp, timestamp) ); return { @@ -794,17 +794,17 @@ async function buildCommitBlockInfo( }; } -async function buildCommitBlockInfoWithCustomLogs( - prevInfo: StoredBlockInfo, - info: CommitBlockInfoWithTimestamp, +async function buildCommitBatchInfoWithCustomLogs( + prevInfo: StoredBatchInfo, + info: CommitBatchInfoWithTimestamp, systemLogs: string[] -): Promise { +): Promise { const timestamp = info.timestamp || (await hardhat.ethers.provider.getBlock('latest')).timestamp; systemLogs[SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY] = constructL2Log( true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - packBatchTimestampAndBlockTimestamp(timestamp, timestamp) + packBatchTimestampAndBatchTimestamp(timestamp, timestamp) ); const size = systemLogs.length == 7 ? 
`0x00000007` : `0x00000008`; @@ -823,10 +823,10 @@ async function buildCommitBlockInfoWithCustomLogs( }; } -function getBlockStoredInfo(commitInfo: CommitBlockInfo, commitment: string): StoredBlockInfo { +function getBatchStoredInfo(commitInfo: CommitBatchInfo, commitment: string): StoredBatchInfo { return { - blockNumber: commitInfo.blockNumber, - blockHash: commitInfo.newStateRoot, + batchNumber: commitInfo.batchNumber, + batchHash: commitInfo.newStateRoot, indexRepeatedStorageChanges: commitInfo.indexRepeatedStorageChanges, numberOfLayer1Txs: commitInfo.numberOfLayer1Txs, priorityOperationsHash: commitInfo.priorityOperationsHash, @@ -1001,18 +1001,18 @@ async function executeCustomTransparentUpgrade( async function makeExecutedEqualCommitted( proxyExecutor: ExecutorFacet, - prevBlockInfo: StoredBlockInfo, - blocksToProve: StoredBlockInfo[], - blocksToExecute: StoredBlockInfo[] + prevBatchInfo: StoredBatchInfo, + batchesToProve: StoredBatchInfo[], + batchesToExecute: StoredBatchInfo[] ) { - blocksToExecute = [...blocksToProve, ...blocksToExecute]; + batchesToExecute = [...batchesToProve, ...batchesToExecute]; await ( - await proxyExecutor.proveBlocks(prevBlockInfo, blocksToProve, { + await proxyExecutor.proveBatches(prevBatchInfo, batchesToProve, { recursiveAggregationInput: [], serializedProof: [] }) ).wait(); - await (await proxyExecutor.executeBlocks(blocksToExecute)).wait(); + await (await proxyExecutor.executeBatches(batchesToExecute)).wait(); } diff --git a/ethereum/test/unit_tests/utils.ts b/ethereum/test/unit_tests/utils.ts index bf3733def3..a7937b95a0 100644 --- a/ethereum/test/unit_tests/utils.ts +++ b/ethereum/test/unit_tests/utils.ts @@ -17,7 +17,7 @@ export enum SYSTEM_LOG_KEYS { TOTAL_L2_TO_L1_PUBDATA_KEY, STATE_DIFF_HASH_KEY, PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, - PREV_BLOCK_HASH_KEY, + PREV_BATCH_HASH_KEY, CHAINED_PRIORITY_TXN_HASH_KEY, NUMBER_OF_LAYER_1_TXS_KEY, EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH @@ -29,7 +29,7 @@ export const 
REQUIRED_L2_GAS_PRICE_PER_PUBDATA = /// Set of parameters that are needed to test the processing of priority operations export class DummyOp { - constructor(public id: number, public expirationBlock: BigNumber, public layer2Tip: number) {} + constructor(public id: number, public expirationBatch: BigNumber, public layer2Tip: number) {} } export enum AccessMode { @@ -117,7 +117,7 @@ export function createSystemLogs() { SYSTEM_LOG_KEYS.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, ethers.constants.HashZero ), - constructL2Log(true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PREV_BLOCK_HASH_KEY, ethers.constants.HashZero), + constructL2Log(true, L2_SYSTEM_CONTEXT_ADDRESS, SYSTEM_LOG_KEYS.PREV_BATCH_HASH_KEY, ethers.constants.HashZero), constructL2Log(true, L2_BOOTLOADER_ADDRESS, SYSTEM_LOG_KEYS.CHAINED_PRIORITY_TXN_HASH_KEY, EMPTY_STRING_KECCAK), constructL2Log( true, @@ -128,10 +128,10 @@ export function createSystemLogs() { ]; } -export function genesisStoredBlockInfo(): StoredBlockInfo { +export function genesisStoredBatchInfo(): StoredBatchInfo { return { - blockNumber: 0, - blockHash: ethers.constants.HashZero, + batchNumber: 0, + batchHash: ethers.constants.HashZero, indexRepeatedStorageChanges: 0, numberOfLayer1Txs: 0, priorityOperationsHash: EMPTY_STRING_KECCAK, @@ -141,19 +141,19 @@ export function genesisStoredBlockInfo(): StoredBlockInfo { }; } -// Packs the batch timestamp and block timestamp and returns the 32-byte hex string +// Packs the batch timestamp and L2 block timestamp and returns the 32-byte hex string // which should be used for the "key" field of the L2->L1 system context log. 
-export function packBatchTimestampAndBlockTimestamp( +export function packBatchTimestampAndBatchTimestamp( batchTimestamp: BigNumberish, - blockTimestamp: BigNumberish + l2BlockTimestamp: BigNumberish ): string { - const packedNum = BigNumber.from(batchTimestamp).shl(128).or(BigNumber.from(blockTimestamp)); + const packedNum = BigNumber.from(batchTimestamp).shl(128).or(BigNumber.from(l2BlockTimestamp)); return ethers.utils.hexZeroPad(ethers.utils.hexlify(packedNum), 32); } -export interface StoredBlockInfo { - blockNumber: BigNumberish; - blockHash: BytesLike; +export interface StoredBatchInfo { + batchNumber: BigNumberish; + batchHash: BytesLike; indexRepeatedStorageChanges: BigNumberish; numberOfLayer1Txs: BigNumberish; priorityOperationsHash: BytesLike; @@ -162,8 +162,8 @@ export interface StoredBlockInfo { commitment: BytesLike; } -export interface CommitBlockInfo { - blockNumber: BigNumberish; +export interface CommitBatchInfo { + batchNumber: BigNumberish; timestamp: number; indexRepeatedStorageChanges: BigNumberish; newStateRoot: BytesLike; diff --git a/ethereum/test/unit_tests/validator_timelock_test.spec.ts b/ethereum/test/unit_tests/validator_timelock_test.spec.ts index 06b71c4a28..313216c096 100644 --- a/ethereum/test/unit_tests/validator_timelock_test.spec.ts +++ b/ethereum/test/unit_tests/validator_timelock_test.spec.ts @@ -16,9 +16,9 @@ describe(`ValidatorTimelock tests`, function () { serializedProof: [] }; - function getMockCommitBlockInfo(blockNumber: number, timestamp: number = 0) { + function getMockCommitBatchInfo(batchNumber: number, timestamp: number = 0) { return { - blockNumber, + batchNumber, timestamp, indexRepeatedStorageChanges: 0, newStateRoot: ethers.constants.HashZero, @@ -31,10 +31,10 @@ describe(`ValidatorTimelock tests`, function () { }; } - function getMockStoredBlockInfo(blockNumber: number, timestamp: number = 0) { + function getMockStoredBatchInfo(batchNumber: number, timestamp: number = 0) { return { - blockNumber, - 
blockHash: ethers.constants.HashZero, + batchNumber, + batchHash: ethers.constants.HashZero, indexRepeatedStorageChanges: 0, numberOfLayer1Txs: 0, priorityOperationsHash: ethers.constants.HashZero, @@ -64,33 +64,35 @@ describe(`ValidatorTimelock tests`, function () { ); }); - it(`Should revert if non-validator commits blocks`, async () => { + it(`Should revert if non-validator commits batches`, async () => { const revertReason = await getCallRevertReason( - validatorTimelock.connect(randomSigner).commitBlocks(getMockStoredBlockInfo(0), [getMockCommitBlockInfo(1)]) + validatorTimelock + .connect(randomSigner) + .commitBatches(getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]) ); expect(revertReason).equal('8h'); }); - it(`Should revert if non-validator proves blocks`, async () => { + it(`Should revert if non-validator proves batches`, async () => { const revertReason = await getCallRevertReason( validatorTimelock .connect(randomSigner) - .proveBlocks(getMockStoredBlockInfo(0), [getMockStoredBlockInfo(1)], MOCK_PROOF_INPUT) + .proveBatches(getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1)], MOCK_PROOF_INPUT) ); expect(revertReason).equal('8h'); }); - it(`Should revert if non-validator revert blocks`, async () => { - const revertReason = await getCallRevertReason(validatorTimelock.connect(randomSigner).revertBlocks(1)); + it(`Should revert if non-validator revert batches`, async () => { + const revertReason = await getCallRevertReason(validatorTimelock.connect(randomSigner).revertBatches(1)); expect(revertReason).equal('8h'); }); - it(`Should revert if non-validator executes blocks`, async () => { + it(`Should revert if non-validator executes batches`, async () => { const revertReason = await getCallRevertReason( - validatorTimelock.connect(randomSigner).executeBlocks([getMockStoredBlockInfo(1)]) + validatorTimelock.connect(randomSigner).executeBatches([getMockStoredBatchInfo(1)]) ); expect(revertReason).equal('8h'); @@ -123,126 +125,130 @@ 
describe(`ValidatorTimelock tests`, function () { expect(await validatorTimelock.executionDelay()).equal(10); }); - it(`Should successfully commit blocks`, async () => { - await validatorTimelock.connect(validator).commitBlocks(getMockStoredBlockInfo(0), [getMockCommitBlockInfo(1)]); + it(`Should successfully commit batches`, async () => { + await validatorTimelock + .connect(validator) + .commitBatches(getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]); - expect(await dummyExecutor.getTotalBlocksCommitted()).equal(1); + expect(await dummyExecutor.getTotalBatchesCommitted()).equal(1); }); - it(`Should successfully prove blocks`, async () => { + it(`Should successfully prove batches`, async () => { await validatorTimelock .connect(validator) - .proveBlocks(getMockStoredBlockInfo(0), [getMockStoredBlockInfo(1, 1)], MOCK_PROOF_INPUT); + .proveBatches(getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1, 1)], MOCK_PROOF_INPUT); - expect(await dummyExecutor.getTotalBlocksVerified()).equal(1); + expect(await dummyExecutor.getTotalBatchesVerified()).equal(1); }); it(`Should revert on executing earlier than the delay`, async () => { const revertReason = await getCallRevertReason( - validatorTimelock.connect(validator).executeBlocks([getMockStoredBlockInfo(1)]) + validatorTimelock.connect(validator).executeBatches([getMockStoredBatchInfo(1)]) ); expect(revertReason).equal('5c'); }); - it(`Should successfully revert blocks`, async () => { - await validatorTimelock.connect(validator).revertBlocks(0); + it(`Should successfully revert batches`, async () => { + await validatorTimelock.connect(validator).revertBatches(0); - expect(await dummyExecutor.getTotalBlocksVerified()).equal(0); - expect(await dummyExecutor.getTotalBlocksCommitted()).equal(0); + expect(await dummyExecutor.getTotalBatchesVerified()).equal(0); + expect(await dummyExecutor.getTotalBatchesCommitted()).equal(0); }); - it(`Should successfully overwrite the committing timestamp on the reverted blocks 
timestamp`, async () => { - const revertedBlocksTimestamp = Number(await validatorTimelock.committedBlockTimestamp(1)); + it(`Should successfully overwrite the committing timestamp on the reverted batches timestamp`, async () => { + const revertedBatchesTimestamp = Number(await validatorTimelock.committedBatchTimestamp(1)); - await validatorTimelock.connect(validator).commitBlocks(getMockStoredBlockInfo(0), [getMockCommitBlockInfo(1)]); + await validatorTimelock + .connect(validator) + .commitBatches(getMockStoredBatchInfo(0), [getMockCommitBatchInfo(1)]); await validatorTimelock .connect(validator) - .proveBlocks(getMockStoredBlockInfo(0), [getMockStoredBlockInfo(1)], MOCK_PROOF_INPUT); + .proveBatches(getMockStoredBatchInfo(0), [getMockStoredBatchInfo(1)], MOCK_PROOF_INPUT); - const newBlocksTimestamp = Number(await validatorTimelock.committedBlockTimestamp(1)); + const newBatchesTimestamp = Number(await validatorTimelock.committedBatchTimestamp(1)); - expect(newBlocksTimestamp).greaterThanOrEqual(revertedBlocksTimestamp); + expect(newBatchesTimestamp).greaterThanOrEqual(revertedBatchesTimestamp); }); - it(`Should successfully execute blocks after the delay`, async () => { - await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 blocks with intervals of 12 seconds - await validatorTimelock.connect(validator).executeBlocks([getMockStoredBlockInfo(1)]); - expect(await dummyExecutor.getTotalBlocksExecuted()).equal(1); + it(`Should successfully execute batches after the delay`, async () => { + await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 batches with intervals of 12 seconds + await validatorTimelock.connect(validator).executeBatches([getMockStoredBatchInfo(1)]); + expect(await dummyExecutor.getTotalBatchesExecuted()).equal(1); }); - it('Should revert if validator tries to commit blocks with invalid last committed blockNumber', async () => { + it('Should revert if validator tries to commit batches with invalid 
last committed batchNumber', async () => { const revertReason = await getCallRevertReason( - validatorTimelock.connect(validator).commitBlocks(getMockStoredBlockInfo(0), [getMockCommitBlockInfo(2)]) + validatorTimelock.connect(validator).commitBatches(getMockStoredBatchInfo(0), [getMockCommitBatchInfo(2)]) ); // Error should be forwarded from the DummyExecutor - expect(revertReason).equal('DummyExecutor: Invalid last committed block number'); + expect(revertReason).equal('DummyExecutor: Invalid last committed batch number'); }); - // Test case to check if proving blocks with invalid blockNumber fails - it('Should revert if validator tries to prove blocks with invalid blockNumber', async () => { + // Test case to check if proving batches with invalid batchNumber fails + it('Should revert if validator tries to prove batches with invalid batchNumber', async () => { const revertReason = await getCallRevertReason( validatorTimelock .connect(validator) - .proveBlocks(getMockStoredBlockInfo(0), [getMockStoredBlockInfo(2, 1)], MOCK_PROOF_INPUT) + .proveBatches(getMockStoredBatchInfo(0), [getMockStoredBatchInfo(2, 1)], MOCK_PROOF_INPUT) ); - expect(revertReason).equal('DummyExecutor: Invalid previous block number'); + expect(revertReason).equal('DummyExecutor: Invalid previous batch number'); }); - it('Should revert if validator tries to execute more blocks than were proven', async () => { - await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 blocks with intervals of 12 seconds + it('Should revert if validator tries to execute more batches than were proven', async () => { + await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 batches with intervals of 12 seconds const revertReason = await getCallRevertReason( - validatorTimelock.connect(validator).executeBlocks([getMockStoredBlockInfo(2)]) + validatorTimelock.connect(validator).executeBatches([getMockStoredBatchInfo(2)]) ); - expect(revertReason).equal("DummyExecutor: Can't 
execute blocks more than committed and proven currently"); + expect(revertReason).equal("DummyExecutor: Can't execute batches more than committed and proven currently"); }); // These tests primarily needed to make gas statistics be more accurate. - it('Should commit multiple blocks in one transaction', async () => { + it('Should commit multiple batches in one transaction', async () => { await validatorTimelock .connect(validator) - .commitBlocks(getMockStoredBlockInfo(1), [ - getMockCommitBlockInfo(2), - getMockCommitBlockInfo(3), - getMockCommitBlockInfo(4), - getMockCommitBlockInfo(5), - getMockCommitBlockInfo(6), - getMockCommitBlockInfo(7), - getMockCommitBlockInfo(8) + .commitBatches(getMockStoredBatchInfo(1), [ + getMockCommitBatchInfo(2), + getMockCommitBatchInfo(3), + getMockCommitBatchInfo(4), + getMockCommitBatchInfo(5), + getMockCommitBatchInfo(6), + getMockCommitBatchInfo(7), + getMockCommitBatchInfo(8) ]); - expect(await dummyExecutor.getTotalBlocksCommitted()).equal(8); + expect(await dummyExecutor.getTotalBatchesCommitted()).equal(8); }); - it('Should prove multiple blocks in one transactions', async () => { + it('Should prove multiple batches in one transactions', async () => { for (let i = 1; i < 8; i++) { await validatorTimelock .connect(validator) - .proveBlocks(getMockStoredBlockInfo(i), [getMockStoredBlockInfo(i + 1)], MOCK_PROOF_INPUT); + .proveBatches(getMockStoredBatchInfo(i), [getMockStoredBatchInfo(i + 1)], MOCK_PROOF_INPUT); - expect(await dummyExecutor.getTotalBlocksVerified()).equal(i + 1); + expect(await dummyExecutor.getTotalBatchesVerified()).equal(i + 1); } }); - it('Should execute multiple blocks in multiple transactions', async () => { - await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 blocks with intervals of 12 seconds + it('Should execute multiple batches in multiple transactions', async () => { + await hardhat.network.provider.send('hardhat_mine', ['0x2', '0xc']); //mine 2 batches with intervals of 
12 seconds await validatorTimelock .connect(validator) - .executeBlocks([ - getMockStoredBlockInfo(2), - getMockStoredBlockInfo(3), - getMockStoredBlockInfo(4), - getMockStoredBlockInfo(5), - getMockStoredBlockInfo(6), - getMockStoredBlockInfo(7), - getMockStoredBlockInfo(8) + .executeBatches([ + getMockStoredBatchInfo(2), + getMockStoredBatchInfo(3), + getMockStoredBatchInfo(4), + getMockStoredBatchInfo(5), + getMockStoredBatchInfo(6), + getMockStoredBatchInfo(7), + getMockStoredBatchInfo(8) ]); - expect(await dummyExecutor.getTotalBlocksExecuted()).equal(8); + expect(await dummyExecutor.getTotalBatchesExecuted()).equal(8); }); }); diff --git a/zksync/contracts/bridge/interfaces/IL1Bridge.sol b/zksync/contracts/bridge/interfaces/IL1Bridge.sol index a735ccb87e..0f0fe01669 100644 --- a/zksync/contracts/bridge/interfaces/IL1Bridge.sol +++ b/zksync/contracts/bridge/interfaces/IL1Bridge.sol @@ -5,9 +5,9 @@ pragma solidity ^0.8.0; /// @author Matter Labs interface IL1Bridge { function finalizeWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, - uint16 _l2TxNumberInBlock, + uint16 _l2TxNumberInBatch, bytes calldata _message, bytes32[] calldata _merkleProof ) external; From 3dd33b92f7153971c9634d28790dadc2093470fc Mon Sep 17 00:00:00 2001 From: Oles Holembovskyy <87322925+olesHolem@users.noreply.github.com> Date: Wed, 27 Sep 2023 14:12:01 +0300 Subject: [PATCH 11/11] Non recursive L1 verifier mode (#37) Co-authored-by: Stanislav Breadless --- .../test/VerifierRecursiveTest.sol | 60 ++++ .../dev-contracts/test/VerifierTest.sol | 2 + ethereum/contracts/zksync/Verifier.sol | 285 ++++++++++-------- ethereum/test/unit_tests/verifier.spec.ts | 211 ++++++++++++- 4 files changed, 420 insertions(+), 138 deletions(-) create mode 100644 ethereum/contracts/dev-contracts/test/VerifierRecursiveTest.sol diff --git a/ethereum/contracts/dev-contracts/test/VerifierRecursiveTest.sol b/ethereum/contracts/dev-contracts/test/VerifierRecursiveTest.sol new 
file mode 100644 index 0000000000..3dbd6c93dd --- /dev/null +++ b/ethereum/contracts/dev-contracts/test/VerifierRecursiveTest.sol @@ -0,0 +1,60 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.13; + +import "../../zksync/Verifier.sol"; + +/// @author Matter Labs +contract VerifierRecursiveTest is Verifier { + function _loadVerificationKey() internal pure override { + assembly { + mstore(VK_GATE_SETUP_0_X_SLOT, 0x08fa9d6f0dd6ac1cbeb94ae20fe7a23df05cb1095df66fb561190e615a4037ef) + mstore(VK_GATE_SETUP_0_Y_SLOT, 0x196dcc8692fe322d21375920559944c12ba7b1ba8b732344cf4ba2e3aa0fc8b4) + mstore(VK_GATE_SETUP_1_X_SLOT, 0x0074aaf5d97bd57551311a8b3e4aa7840bc55896502020b2f43ad6a98d81a443) + mstore(VK_GATE_SETUP_1_Y_SLOT, 0x2d275a3ad153dc9d89ebb9c9b6a0afd2dde82470554e9738d905c328fbb4c8bc) + mstore(VK_GATE_SETUP_2_X_SLOT, 0x287f1975a9aeaef5d2bb0767b5ef538f76e82f7da01c0cb6db8c6f920818ec4f) + mstore(VK_GATE_SETUP_2_Y_SLOT, 0x2fff6f53594129f794a7731d963d27e72f385c5c6d8e08829e6f66a9d29a12ea) + mstore(VK_GATE_SETUP_3_X_SLOT, 0x038809fa3d4b7320d43e023454194f0a7878baa7e73a295d2d105260f1c34cbc) + mstore(VK_GATE_SETUP_3_Y_SLOT, 0x25418b1105cf45b2a3da6c349bab1d9caaf145eaf24d1e8fb92c11654c000781) + mstore(VK_GATE_SETUP_4_X_SLOT, 0x0561cafd527ac3f0bc550db77d87cd1c63938f7ec051e62ebf84a5bbe07f9840) + mstore(VK_GATE_SETUP_4_Y_SLOT, 0x28f87201b4cbe19f1517a1c29ca6d6cb074502ccfed4c31c8931c6992c3eea43) + mstore(VK_GATE_SETUP_5_X_SLOT, 0x27e0af572bac6e36d31c33808cb44c0ef8ceee5e2850e916fb01f3747db72491) + mstore(VK_GATE_SETUP_5_Y_SLOT, 0x1da20087ba61c59366b21e31e4ac6889d357cf11bf16b94d875f94f41525c427) + mstore(VK_GATE_SETUP_6_X_SLOT, 0x2c2bcafea8f93d07f96874f470985a8d272c09c8ed49373f36497ee80bd8da17) + mstore(VK_GATE_SETUP_6_Y_SLOT, 0x299276cf6dca1a7e3780f6276c5d067403f6e024e83e0cc1ab4c5f7252b7f653) + mstore(VK_GATE_SETUP_7_X_SLOT, 0x0ba9d4a53e050da25b8410045b634f1ca065ff74acd35bab1a72bf1f20047ef3) + mstore(VK_GATE_SETUP_7_Y_SLOT, 
0x1f1eefc8b0507a08f852f554bd7abcbd506e52de390ca127477a678d212abfe5) + + mstore(VK_GATE_SELECTORS_0_X_SLOT, 0x1c6b68d9920620012d85a4850dad9bd6d03ae8bbc7a08b827199e85dba1ef2b1) + mstore(VK_GATE_SELECTORS_0_Y_SLOT, 0x0f6380560d1b585628ed259289cec19d3a7c70c60e66bbfebfcb70c8c312d91e) + mstore(VK_GATE_SELECTORS_1_X_SLOT, 0x0dfead780e5067181aae631ff734a33fca302773472997daca58ba49dbd20dcc) + mstore(VK_GATE_SELECTORS_1_Y_SLOT, 0x00f13fa6e356f525d2fd1c533acf2858c0d2b9f0a9b3180f94e1543929c75073) + + mstore(VK_PERMUTATION_0_X_SLOT, 0x1df0747c787934650d99c5696f9273088ad07ec3e0825c9d39685a9b9978ebed) + mstore(VK_PERMUTATION_0_Y_SLOT, 0x2ace2a277becbc69af4e89518eb50960a733d9d71354845ea43d2e65c8e0e4cb) + mstore(VK_PERMUTATION_1_X_SLOT, 0x06598c8236a5f5045cd7444dc87f3e1f66f99bf01251e13be4dc0ab1f7f1af4b) + mstore(VK_PERMUTATION_1_Y_SLOT, 0x14ca234fe9b3bb1e5517fc60d6b90f8ad44b0899a2d4f71a64c9640b3142ce8b) + mstore(VK_PERMUTATION_2_X_SLOT, 0x01889e2c684caefde60471748f4259196ecf4209a735ccdf7b1816f05bafa50a) + mstore(VK_PERMUTATION_2_Y_SLOT, 0x092d287a080bfe2fd40ad392ff290e462cd0e347b8fd9d05b90af234ce77a11b) + mstore(VK_PERMUTATION_3_X_SLOT, 0x0dd98eeb5bc12c221da969398b67750a8774dbdd37a78da52367f9fc0e566d5c) + mstore(VK_PERMUTATION_3_Y_SLOT, 0x06750ceb40c9fb87fc424df9599340938b7552b759914a90cb0e41d3915c945b) + + mstore(VK_LOOKUP_SELECTOR_X_SLOT, 0x2f491c662ae53ceb358f57a868dc00b89befa853bd9a449127ea2d46820995bd) + mstore(VK_LOOKUP_SELECTOR_Y_SLOT, 0x231fe6538634ff8b6fa21ca248fb15e7f43d82eb0bfa705490d24ddb3e3cad77) + + mstore(VK_LOOKUP_TABLE_0_X_SLOT, 0x0ebe0de4a2f39df3b903da484c1641ffdffb77ff87ce4f9508c548659eb22d3c) + mstore(VK_LOOKUP_TABLE_0_Y_SLOT, 0x12a3209440242d5662729558f1017ed9dcc08fe49a99554dd45f5f15da5e4e0b) + mstore(VK_LOOKUP_TABLE_1_X_SLOT, 0x1b7d54f8065ca63bed0bfbb9280a1011b886d07e0c0a26a66ecc96af68c53bf9) + mstore(VK_LOOKUP_TABLE_1_Y_SLOT, 0x2c51121fff5b8f58c302f03c74e0cb176ae5a1d1730dec4696eb9cce3fe284ca) + mstore(VK_LOOKUP_TABLE_2_X_SLOT, 
0x0138733c5faa9db6d4b8df9748081e38405999e511fb22d40f77cf3aef293c44) + mstore(VK_LOOKUP_TABLE_2_Y_SLOT, 0x269bee1c1ac28053238f7fe789f1ea2e481742d6d16ae78ed81e87c254af0765) + mstore(VK_LOOKUP_TABLE_3_X_SLOT, 0x1b1be7279d59445065a95f01f16686adfa798ec4f1e6845ffcec9b837e88372e) + mstore(VK_LOOKUP_TABLE_3_Y_SLOT, 0x057c90cb96d8259238ed86b05f629efd55f472a721efeeb56926e979433e6c0e) + + mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x12cd873a6f18a4a590a846d9ebf61565197edf457efd26bc408eb61b72f37b59) + mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x19890cbdac892682e7a5910ca6c238c082130e1c71f33d0c9c901153377770d1) + + mstore(VK_RECURSIVE_FLAG_SLOT, 1) + } + } +} diff --git a/ethereum/contracts/dev-contracts/test/VerifierTest.sol b/ethereum/contracts/dev-contracts/test/VerifierTest.sol index 230c706a65..2308256fcc 100644 --- a/ethereum/contracts/dev-contracts/test/VerifierTest.sol +++ b/ethereum/contracts/dev-contracts/test/VerifierTest.sol @@ -53,6 +53,8 @@ contract VerifierTest is Verifier { mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x12cd873a6f18a4a590a846d9ebf61565197edf457efd26bc408eb61b72f37b59) mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x19890cbdac892682e7a5910ca6c238c082130e1c71f33d0c9c901153377770d1) + + mstore(VK_RECURSIVE_FLAG_SLOT, 0) } } } diff --git a/ethereum/contracts/zksync/Verifier.sol b/ethereum/contracts/zksync/Verifier.sol index a5e8d05173..9c2a59bcff 100644 --- a/ethereum/contracts/zksync/Verifier.sol +++ b/ethereum/contracts/zksync/Verifier.sol @@ -66,161 +66,163 @@ contract Verifier is IVerifier { uint256 internal constant VK_LOOKUP_TABLE_TYPE_X_SLOT = 0x200 + 0x4c0; uint256 internal constant VK_LOOKUP_TABLE_TYPE_Y_SLOT = 0x200 + 0x4e0; + uint256 internal constant VK_RECURSIVE_FLAG_SLOT = 0x200 + 0x500; + /*////////////////////////////////////////////////////////////// Proof //////////////////////////////////////////////////////////////*/ - uint256 internal constant PROOF_PUBLIC_INPUT = 0x200 + 0x600 + 0x000; - - uint256 internal constant PROOF_STATE_POLYS_0_X_SLOT = 0x200 + 0x600 + 
0x020; - uint256 internal constant PROOF_STATE_POLYS_0_Y_SLOT = 0x200 + 0x600 + 0x040; - uint256 internal constant PROOF_STATE_POLYS_1_X_SLOT = 0x200 + 0x600 + 0x060; - uint256 internal constant PROOF_STATE_POLYS_1_Y_SLOT = 0x200 + 0x600 + 0x080; - uint256 internal constant PROOF_STATE_POLYS_2_X_SLOT = 0x200 + 0x600 + 0x0a0; - uint256 internal constant PROOF_STATE_POLYS_2_Y_SLOT = 0x200 + 0x600 + 0x0c0; - uint256 internal constant PROOF_STATE_POLYS_3_X_SLOT = 0x200 + 0x600 + 0x0e0; - uint256 internal constant PROOF_STATE_POLYS_3_Y_SLOT = 0x200 + 0x600 + 0x100; - - uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT = 0x200 + 0x600 + 0x120; - uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x600 + 0x140; - - uint256 internal constant PROOF_LOOKUP_S_POLY_X_SLOT = 0x200 + 0x600 + 0x160; - uint256 internal constant PROOF_LOOKUP_S_POLY_Y_SLOT = 0x200 + 0x600 + 0x180; - - uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT = 0x200 + 0x600 + 0x1a0; - uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x600 + 0x1c0; - - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT = 0x200 + 0x600 + 0x1e0; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT = 0x200 + 0x600 + 0x200; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT = 0x200 + 0x600 + 0x220; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_Y_SLOT = 0x200 + 0x600 + 0x240; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT = 0x200 + 0x600 + 0x260; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_2_Y_SLOT = 0x200 + 0x600 + 0x280; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT = 0x200 + 0x600 + 0x2a0; - uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_Y_SLOT = 0x200 + 0x600 + 0x2c0; - - uint256 internal constant PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x2e0; - uint256 internal constant PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x300; - 
uint256 internal constant PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x320; - uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x340; - - uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x360; - uint256 internal constant PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x380; - - uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x3a0; - uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x3c0; - uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x3e0; - - uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x400; - uint256 internal constant PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x420; - uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x440; - uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x460; - uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x480; - uint256 internal constant PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x4a0; - uint256 internal constant PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x4c0; - uint256 internal constant PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x4e0; - uint256 internal constant PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x500; - - uint256 internal constant PROOF_OPENING_PROOF_AT_Z_X_SLOT = 0x200 + 0x600 + 0x520; - uint256 internal constant PROOF_OPENING_PROOF_AT_Z_Y_SLOT = 0x200 + 0x600 + 0x540; - uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT = 0x200 + 0x600 + 0x560; - uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x600 + 0x580; - - uint256 internal constant PROOF_RECURSIVE_PART_P1_X_SLOT = 0x200 + 
0x600 + 0x5a0; - uint256 internal constant PROOF_RECURSIVE_PART_P1_Y_SLOT = 0x200 + 0x600 + 0x5c0; - - uint256 internal constant PROOF_RECURSIVE_PART_P2_X_SLOT = 0x200 + 0x600 + 0x5e0; - uint256 internal constant PROOF_RECURSIVE_PART_P2_Y_SLOT = 0x200 + 0x600 + 0x600; + uint256 internal constant PROOF_PUBLIC_INPUT = 0x200 + 0x520 + 0x000; + + uint256 internal constant PROOF_STATE_POLYS_0_X_SLOT = 0x200 + 0x520 + 0x020; + uint256 internal constant PROOF_STATE_POLYS_0_Y_SLOT = 0x200 + 0x520 + 0x040; + uint256 internal constant PROOF_STATE_POLYS_1_X_SLOT = 0x200 + 0x520 + 0x060; + uint256 internal constant PROOF_STATE_POLYS_1_Y_SLOT = 0x200 + 0x520 + 0x080; + uint256 internal constant PROOF_STATE_POLYS_2_X_SLOT = 0x200 + 0x520 + 0x0a0; + uint256 internal constant PROOF_STATE_POLYS_2_Y_SLOT = 0x200 + 0x520 + 0x0c0; + uint256 internal constant PROOF_STATE_POLYS_3_X_SLOT = 0x200 + 0x520 + 0x0e0; + uint256 internal constant PROOF_STATE_POLYS_3_Y_SLOT = 0x200 + 0x520 + 0x100; + + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_X_SLOT = 0x200 + 0x520 + 0x120; + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x520 + 0x140; + + uint256 internal constant PROOF_LOOKUP_S_POLY_X_SLOT = 0x200 + 0x520 + 0x160; + uint256 internal constant PROOF_LOOKUP_S_POLY_Y_SLOT = 0x200 + 0x520 + 0x180; + + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_X_SLOT = 0x200 + 0x520 + 0x1a0; + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_Y_SLOT = 0x200 + 0x520 + 0x1c0; + + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_X_SLOT = 0x200 + 0x520 + 0x1e0; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_0_Y_SLOT = 0x200 + 0x520 + 0x200; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_X_SLOT = 0x200 + 0x520 + 0x220; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_1_Y_SLOT = 0x200 + 0x520 + 0x240; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_2_X_SLOT = 0x200 + 0x520 + 0x260; + uint256 internal constant 
PROOF_QUOTIENT_POLY_PARTS_2_Y_SLOT = 0x200 + 0x520 + 0x280; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_X_SLOT = 0x200 + 0x520 + 0x2a0; + uint256 internal constant PROOF_QUOTIENT_POLY_PARTS_3_Y_SLOT = 0x200 + 0x520 + 0x2c0; + + uint256 internal constant PROOF_STATE_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x2e0; + uint256 internal constant PROOF_STATE_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x300; + uint256 internal constant PROOF_STATE_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x320; + uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x340; + + uint256 internal constant PROOF_STATE_POLYS_3_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x360; + uint256 internal constant PROOF_GATE_SELECTORS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x380; + + uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_0_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3a0; + uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_1_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3c0; + uint256 internal constant PROOF_COPY_PERMUTATION_POLYS_2_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x3e0; + + uint256 internal constant PROOF_COPY_PERMUTATION_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x400; + uint256 internal constant PROOF_LOOKUP_S_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x420; + uint256 internal constant PROOF_LOOKUP_GRAND_PRODUCT_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x440; + uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x460; + uint256 internal constant PROOF_LOOKUP_T_POLY_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x480; + uint256 internal constant PROOF_LOOKUP_SELECTOR_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4a0; + uint256 internal constant PROOF_LOOKUP_TABLE_TYPE_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4c0; + uint256 internal constant PROOF_QUOTIENT_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x4e0; + uint256 internal constant PROOF_LINEARISATION_POLY_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x500; + + 
uint256 internal constant PROOF_OPENING_PROOF_AT_Z_X_SLOT = 0x200 + 0x520 + 0x520; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_Y_SLOT = 0x200 + 0x520 + 0x540; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT = 0x200 + 0x520 + 0x560; + uint256 internal constant PROOF_OPENING_PROOF_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x520 + 0x580; + + uint256 internal constant PROOF_RECURSIVE_PART_P1_X_SLOT = 0x200 + 0x520 + 0x5a0; + uint256 internal constant PROOF_RECURSIVE_PART_P1_Y_SLOT = 0x200 + 0x520 + 0x5c0; + + uint256 internal constant PROOF_RECURSIVE_PART_P2_X_SLOT = 0x200 + 0x520 + 0x5e0; + uint256 internal constant PROOF_RECURSIVE_PART_P2_Y_SLOT = 0x200 + 0x520 + 0x600; /*////////////////////////////////////////////////////////////// Transcript slot //////////////////////////////////////////////////////////////*/ - uint256 internal constant TRANSCRIPT_BEGIN_SLOT = 0x200 + 0x600 + 0x620 + 0x00; - uint256 internal constant TRANSCRIPT_DST_BYTE_SLOT = 0x200 + 0x600 + 0x620 + 0x03; - uint256 internal constant TRANSCRIPT_STATE_0_SLOT = 0x200 + 0x600 + 0x620 + 0x04; - uint256 internal constant TRANSCRIPT_STATE_1_SLOT = 0x200 + 0x600 + 0x620 + 0x24; - uint256 internal constant TRANSCRIPT_CHALLENGE_SLOT = 0x200 + 0x600 + 0x620 + 0x44; + uint256 internal constant TRANSCRIPT_BEGIN_SLOT = 0x200 + 0x520 + 0x620 + 0x00; + uint256 internal constant TRANSCRIPT_DST_BYTE_SLOT = 0x200 + 0x520 + 0x620 + 0x03; + uint256 internal constant TRANSCRIPT_STATE_0_SLOT = 0x200 + 0x520 + 0x620 + 0x04; + uint256 internal constant TRANSCRIPT_STATE_1_SLOT = 0x200 + 0x520 + 0x620 + 0x24; + uint256 internal constant TRANSCRIPT_CHALLENGE_SLOT = 0x200 + 0x520 + 0x620 + 0x44; /*////////////////////////////////////////////////////////////// Partial verifier state //////////////////////////////////////////////////////////////*/ - uint256 internal constant STATE_ALPHA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x000; - uint256 internal constant STATE_BETA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x020; 
- uint256 internal constant STATE_GAMMA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x040; - uint256 internal constant STATE_POWER_OF_ALPHA_2_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x060; - uint256 internal constant STATE_POWER_OF_ALPHA_3_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x080; - uint256 internal constant STATE_POWER_OF_ALPHA_4_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x0a0; - uint256 internal constant STATE_POWER_OF_ALPHA_5_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x0c0; - uint256 internal constant STATE_POWER_OF_ALPHA_6_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x0e0; - uint256 internal constant STATE_POWER_OF_ALPHA_7_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x100; - uint256 internal constant STATE_POWER_OF_ALPHA_8_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x120; - uint256 internal constant STATE_ETA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x140; - uint256 internal constant STATE_BETA_LOOKUP_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x160; - uint256 internal constant STATE_GAMMA_LOOKUP_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x180; - uint256 internal constant STATE_BETA_PLUS_ONE_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x1a0; - uint256 internal constant STATE_BETA_GAMMA_PLUS_GAMMA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x1c0; - uint256 internal constant STATE_V_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x1e0; - uint256 internal constant STATE_U_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x200; - uint256 internal constant STATE_Z_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x220; - uint256 internal constant STATE_Z_MINUS_LAST_OMEGA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x240; - uint256 internal constant STATE_L_0_AT_Z_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x260; - uint256 internal constant STATE_L_N_MINUS_ONE_AT_Z_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x280; - uint256 internal constant STATE_Z_IN_DOMAIN_SIZE = 0x200 + 0x600 + 0x620 + 0x80 + 0x2a0; + uint256 internal constant STATE_ALPHA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x000; + uint256 internal constant STATE_BETA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 
0x020; + uint256 internal constant STATE_GAMMA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x040; + uint256 internal constant STATE_POWER_OF_ALPHA_2_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x060; + uint256 internal constant STATE_POWER_OF_ALPHA_3_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x080; + uint256 internal constant STATE_POWER_OF_ALPHA_4_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0a0; + uint256 internal constant STATE_POWER_OF_ALPHA_5_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0c0; + uint256 internal constant STATE_POWER_OF_ALPHA_6_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x0e0; + uint256 internal constant STATE_POWER_OF_ALPHA_7_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x100; + uint256 internal constant STATE_POWER_OF_ALPHA_8_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x120; + uint256 internal constant STATE_ETA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x140; + uint256 internal constant STATE_BETA_LOOKUP_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x160; + uint256 internal constant STATE_GAMMA_LOOKUP_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x180; + uint256 internal constant STATE_BETA_PLUS_ONE_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1a0; + uint256 internal constant STATE_BETA_GAMMA_PLUS_GAMMA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1c0; + uint256 internal constant STATE_V_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x1e0; + uint256 internal constant STATE_U_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x200; + uint256 internal constant STATE_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x220; + uint256 internal constant STATE_Z_MINUS_LAST_OMEGA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x240; + uint256 internal constant STATE_L_0_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x260; + uint256 internal constant STATE_L_N_MINUS_ONE_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x280; + uint256 internal constant STATE_Z_IN_DOMAIN_SIZE = 0x200 + 0x520 + 0x620 + 0x80 + 0x2a0; /*////////////////////////////////////////////////////////////// Queries //////////////////////////////////////////////////////////////*/ - uint256 internal 
constant QUERIES_BUFFER_POINT_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x00; + uint256 internal constant QUERIES_BUFFER_POINT_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x00; - uint256 internal constant QUERIES_AT_Z_0_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x40; - uint256 internal constant QUERIES_AT_Z_0_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x60; - uint256 internal constant QUERIES_AT_Z_1_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x80; - uint256 internal constant QUERIES_AT_Z_1_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0xa0; + uint256 internal constant QUERIES_AT_Z_0_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x40; + uint256 internal constant QUERIES_AT_Z_0_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x60; + uint256 internal constant QUERIES_AT_Z_1_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x80; + uint256 internal constant QUERIES_AT_Z_1_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xa0; - uint256 internal constant QUERIES_T_POLY_AGGREGATED_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0xc0; - uint256 internal constant QUERIES_T_POLY_AGGREGATED_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0xe0; + uint256 internal constant QUERIES_T_POLY_AGGREGATED_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xc0; + uint256 internal constant QUERIES_T_POLY_AGGREGATED_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0xe0; /*////////////////////////////////////////////////////////////// Aggregated commitment //////////////////////////////////////////////////////////////*/ - uint256 internal constant AGGREGATED_AT_Z_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x00; - uint256 internal constant AGGREGATED_AT_Z_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x20; + uint256 internal constant AGGREGATED_AT_Z_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x00; + uint256 internal constant AGGREGATED_AT_Z_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x20; - uint256 internal constant 
AGGREGATED_AT_Z_OMEGA_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x40; - uint256 internal constant AGGREGATED_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x60; + uint256 internal constant AGGREGATED_AT_Z_OMEGA_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x40; + uint256 internal constant AGGREGATED_AT_Z_OMEGA_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x60; - uint256 internal constant AGGREGATED_OPENING_AT_Z_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x80; - uint256 internal constant AGGREGATED_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xa0; + uint256 internal constant AGGREGATED_OPENING_AT_Z_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x80; + uint256 internal constant AGGREGATED_OPENING_AT_Z_OMEGA_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xa0; /*////////////////////////////////////////////////////////////// Pairing data //////////////////////////////////////////////////////////////*/ - uint256 internal constant PAIRING_BUFFER_POINT_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x00; - uint256 internal constant PAIRING_BUFFER_POINT_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x20; + uint256 internal constant PAIRING_BUFFER_POINT_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x00; + uint256 internal constant PAIRING_BUFFER_POINT_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x20; uint256 internal constant PAIRING_PAIR_WITH_GENERATOR_X_SLOT = - 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x40; + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x40; uint256 internal constant PAIRING_PAIR_WITH_GENERATOR_Y_SLOT = - 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x60; + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0xc0 + 0x60; - uint256 internal constant PAIRING_PAIR_WITH_X_X_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x80; - uint256 
internal constant PAIRING_PAIR_WITH_X_Y_SLOT = 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xa0; + uint256 internal constant PAIRING_PAIR_WITH_X_X_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x80; + uint256 internal constant PAIRING_PAIR_WITH_X_Y_SLOT = 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xa0; /*////////////////////////////////////////////////////////////// Slots for scalar multiplication optimizations //////////////////////////////////////////////////////////////*/ uint256 internal constant COPY_PERMUTATION_FIRST_AGGREGATED_COMMITMENT_COEFF = - 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xc0; + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xc0; uint256 internal constant LOOKUP_GRAND_PRODUCT_FIRST_AGGREGATED_COMMITMENT_COEFF = - 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xe0; + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0xe0; uint256 internal constant LOOKUP_S_FIRST_AGGREGATED_COMMITMENT_COEFF = - 0x200 + 0x600 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x100; + 0x200 + 0x520 + 0x620 + 0x80 + 0x2c0 + 0x100 + 0x100 + 0x100; /*////////////////////////////////////////////////////////////// Constants @@ -239,7 +241,7 @@ contract Verifier is IVerifier { uint256 internal constant NON_RESIDUES_1 = 0x07; uint256 internal constant NON_RESIDUES_2 = 0x0a; - // g2 elements + // trusted setup g2 elements uint256 internal constant G2_ELEMENTS_0_X1 = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2; uint256 internal constant G2_ELEMENTS_0_X2 = 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed; uint256 internal constant G2_ELEMENTS_0_Y1 = 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b; @@ -256,7 +258,7 @@ contract Verifier is IVerifier { assembly { let start := VK_GATE_SETUP_0_X_SLOT - let end := VK_LOOKUP_TABLE_TYPE_Y_SLOT + let end := VK_RECURSIVE_FLAG_SLOT let length := add(sub(end, start), 0x20) vkHash := keccak256(start, length) 
@@ -327,6 +329,9 @@ contract Verifier is IVerifier { // table type commitment mstore(VK_LOOKUP_TABLE_TYPE_X_SLOT, 0x006f5f6969088413e15abe92418dd7668c6c5fa2dcca168e71a98d84a9fe39de) mstore(VK_LOOKUP_TABLE_TYPE_Y_SLOT, 0x18550c804fadc55861b6a34d5341d594486833e62bd6137089f3335566ca40ee) + + // flag for using recursive part + mstore(VK_RECURSIVE_FLAG_SLOT, 0) } } @@ -502,7 +507,7 @@ contract Verifier is IVerifier { /// s(x*omega), t(z*omega), table_type(z) - lookup argument polynomial openings /// r(z) - linearisation polynomial opening /// - /// 4. Recursive proof (2 elliptic curve points over F_q) + /// 4. Recursive proof (0 or 2 elliptic curve points over F_q) function loadProof() { // 1. Load public input let offset := calldataload(0x04) @@ -662,24 +667,33 @@ contract Verifier is IVerifier { // 3. Load the recursive part of the proof offset := calldataload(0x44) let recursiveProofLengthInWords := calldataload(add(offset, 0x04)) - isValid := and(eq(recursiveProofLengthInWords, 4), isValid) - // PROOF_RECURSIVE_PART_P1 - { - let x := mod(calldataload(add(offset, 0x024)), Q_MOD) - let y := mod(calldataload(add(offset, 0x044)), Q_MOD) - let xx := mulmod(x, x, Q_MOD) - isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) - mstore(PROOF_RECURSIVE_PART_P1_X_SLOT, x) - mstore(PROOF_RECURSIVE_PART_P1_Y_SLOT, y) + + switch mload(VK_RECURSIVE_FLAG_SLOT) + case 0 { + // recursive part should be empty + isValid := and(iszero(recursiveProofLengthInWords), isValid) } - // PROOF_RECURSIVE_PART_P2 - { - let x := mod(calldataload(add(offset, 0x064)), Q_MOD) - let y := mod(calldataload(add(offset, 0x084)), Q_MOD) - let xx := mulmod(x, x, Q_MOD) - isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) - mstore(PROOF_RECURSIVE_PART_P2_X_SLOT, x) - mstore(PROOF_RECURSIVE_PART_P2_Y_SLOT, y) + default { + // recursive part should be consist of 2 points + isValid := and(eq(recursiveProofLengthInWords, 4), isValid) + // 
PROOF_RECURSIVE_PART_P1 + { + let x := mod(calldataload(add(offset, 0x024)), Q_MOD) + let y := mod(calldataload(add(offset, 0x044)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) + mstore(PROOF_RECURSIVE_PART_P1_X_SLOT, x) + mstore(PROOF_RECURSIVE_PART_P1_Y_SLOT, y) + } + // PROOF_RECURSIVE_PART_P2 + { + let x := mod(calldataload(add(offset, 0x064)), Q_MOD) + let y := mod(calldataload(add(offset, 0x084)), Q_MOD) + let xx := mulmod(x, x, Q_MOD) + isValid := and(eq(mulmod(y, y, Q_MOD), addmod(mulmod(x, xx, Q_MOD), 3, Q_MOD)), isValid) + mstore(PROOF_RECURSIVE_PART_P2_X_SLOT, x) + mstore(PROOF_RECURSIVE_PART_P2_Y_SLOT, y) + } } // Revert if a proof is not valid @@ -1587,6 +1601,7 @@ contract Verifier is IVerifier { /// where [F] and [E] were computed previously /// /// Also we need to check that e([P1], [x]_2) = e([P2], [1]_2) + /// if we have the recursive part of the proof /// where [P1] and [P2] are parts of the recursive proof /// /// We can aggregate both pairings into one for gas optimization: @@ -1617,10 +1632,12 @@ contract Verifier is IVerifier { pointMulAndAddIntoDest(PROOF_OPENING_PROOF_AT_Z_OMEGA_X_SLOT, u, PAIRING_PAIR_WITH_X_X_SLOT) pointNegate(PAIRING_PAIR_WITH_X_X_SLOT) - // Add recursive proof part - let uu := mulmod(u, u, R_MOD) - pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P1_X_SLOT, uu, PAIRING_PAIR_WITH_GENERATOR_X_SLOT) - pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P2_X_SLOT, uu, PAIRING_PAIR_WITH_X_X_SLOT) + // Add recursive proof part if needed + if mload(VK_RECURSIVE_FLAG_SLOT) { + let uu := mulmod(u, u, R_MOD) + pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P1_X_SLOT, uu, PAIRING_PAIR_WITH_GENERATOR_X_SLOT) + pointMulAndAddIntoDest(PROOF_RECURSIVE_PART_P2_X_SLOT, uu, PAIRING_PAIR_WITH_X_X_SLOT) + } // Calculate pairing { diff --git a/ethereum/test/unit_tests/verifier.spec.ts b/ethereum/test/unit_tests/verifier.spec.ts index 71859681e0..376af7385f 100644 --- 
a/ethereum/test/unit_tests/verifier.spec.ts +++ b/ethereum/test/unit_tests/verifier.spec.ts @@ -1,6 +1,6 @@ import * as hardhat from 'hardhat'; import { expect } from 'chai'; -import { VerifierTest, VerifierTestFactory } from '../../typechain'; +import { VerifierTest, VerifierRecursiveTest, VerifierTestFactory } from '../../typechain'; import { getCallRevertReason } from './utils'; import { ethers } from 'hardhat'; @@ -8,6 +8,209 @@ describe('Verifier test', function () { const Q_MOD = '21888242871839275222246405745257275088696311157297823662689037894645226208583'; const R_MOD = '21888242871839275222246405745257275088548364400416034343698204186575808495617'; + const PROOF = { + publicInputs: ['0x00461afd95c6bd5a38a01a995f5c292d19a816a139bbc78fc23321c3b8da6243'], + serializedProof: [ + '0x2b80ef6480b0c1a4ab9ccac1b1f5549d8d0e875e45f445599de5e1a88c3ccf25', + '0x173e23b955ea8f1972358bbeae3539d96e60494032faf3ada36fb3660f45d752', + '0x0579422893e75ebcf9ebfefd6bf80513bee55e16f0971779d774cca3227c11a3', + '0x257c35d228de381fa897042758ef80e4f29c84e8851878d12bae17d7700059e5', + '0x11cb7bc2927e1ffd32b7c0bf9b75e7f3f2915c33ca525bbb91a39d5ba9d050d1', + '0x0b396e2027a7e5cbffb8ef303560420c2ec2c25df1325b037208f61679596021', + '0x1d6feb9bfaf92d370a8041b1669fc901ac083c6f09d815df8e57e3bc0af529c6', + '0x1dd56a14ac384b74aab66e11dfeb36242a3d3c83c7fc11beed1ebb2d4b921aa3', + '0x07158e6a51b6354ab3355f298d5cc24948bddd48b0715eff52e0f135936536fc', + '0x18969b22583c701ef304d793e22d11a56ca9e5b08c20cd877b4fb142dfab852f', + '0x0c49d474877b03b231cb8aeb592728c93f6b5b62e357a4a77c7dd2818181fc43', + '0x186e08d590ce9937d193189a0c74890237df96ebc6593dc55b988eae74b9ea44', + '0x180772b6ef5bd078663a3ba77c3c997b0f9d6a62664a9aa35be4acfe5fd52acb', + '0x01e19ccd1fa25da95ce7799c6946a64eb12b04bb59fb31b0f48346e844ee06bb', + '0x0a991aee2dfdea382dd4ed65083c15004d812dcc6017aed812360c1a750f6994', + '0x2eba4d12e899bd433bc277127d3bb98997ea4953aa092705e185971c5bf95057', + 
'0x16ebb143325b1da3c88baf9f69a6911962c89cc34f364cb62f0db35e645baaa3', + '0x10a1806face2c2906455ac9060155bd648eb18f30a73f0d8214ef75683a2f015', + '0x2f153ebf44a9ebe05033a085c9c5a20ef002437420badd9723b59d9d9fed7666', + '0x054da7edbb7dd64940f64d5a46e6d2b70f8d16496657acf01d1bff905e70fe34', + '0x11a54b951c5f0120c00d6c0ad6b188f21c3d2b955ebea2578926eaf7b0607a34', + '0x2b5266f06d505e753e8ca5b9a4718f060ed1386313ef9c78b79f7f0474b3ecfc', + '0x202b9746f651068481021d43598dafcd8aa5e1c662de5baf24507cf8483e517f', + '0x0e4c150798976c5dbf261b2f50d43e2ae145eec6d63d361b79abdf5a875c7312', + '0x0d78beaef934700a7a3f63cc94f8ff11f056b770fc7f2e72f6cf2b7b29fb2298', + '0x26d892a58479bb3a147a7bfd8488ab1e6d97a89b647c886ace6d072134be3474', + '0x22ee472ea71eb002d8e3b35f93825ef831ab6d321eccc62ae4a1230449f05316', + '0x18b8f397a1a1db84ce0985252007c532c7d6f0454ef88a446180d6ab3b348321', + '0x0cbecff5b91f1da7dd1d440f7dd8c48726d7edd5cd119c8f2603fbfba03acd59', + '0x1f73e67e371a989ef56adc605ce4be99fb1a1200cdc9f15e1cbd9c825a400ed7', + '0x028667567deeadd469936a07962ba1c7215df0b9d27836cb1160088fc9e44b4c', + '0x17d4f2ed4b820a8222d2b839035ef0c26ee5ec8e8d2d1a7c16486e54240455cd', + '0x07a3089dc75c8035530c84d5067f481d42d2a095e9a8bb839c20909b5c978fcc', + '0x091c2be5555c05bb87116b667992af159e4ad0616c0ec7335570e26c6e627531', + '0x03c5e763840a185dbc363ed770645d8a0fef39736741848f12d90c3027d3fbfd', + '0x1f6e675ad9dd1cb9f92086111c47511f510e27c3632527d56c48be1c7b8a03e2', + '0x23aa0ab9bfb0e38ff029ba5a4cc6f4b8a1dde5b54b1db7435e22c9048ffa7029', + '0x19a6d569cc94a65fa3685ea1144db7415ceb1cabb11e267c35097dea637536d9', + '0x04dc0a7c7669340261725af51e4c32eb7f8968b163e70f0beccdf20bd7f771c1', + '0x1bf9dd4999e0e82da492c292fbb8287bcccd0cb3cd2f1de14f8b4a1592786715', + '0x257c2aa02452019ea981bc722f0777552be886772eea9a3bdf3257a1e3b75954', + '0x01b4dc62f39bdb3596ff653b6035e5fb17d278466ba4621a632962a7299523f1', + '0x0df615b627d9dd8e0d4d7f96c7e30f34d0cbda04c761c191d81cac19de41ccbd', + 
'0x1c22d1d281177a86617454edf488d6bb18c6a60222be2121091f4b18d4f5be92' + ], + recursiveAggregationInput: [] + }; + let verifier: VerifierTest; + + before(async function () { + const verifierFactory = await hardhat.ethers.getContractFactory('VerifierTest'); + const verifierContract = await verifierFactory.deploy(); + verifier = VerifierTestFactory.connect(verifierContract.address, verifierContract.signer); + }); + + it('Should verify proof', async () => { + // Call the verifier directly (though the call, not static call) to add the save the consumed gas into the statistic. + const calldata = verifier.interface.encodeFunctionData('verify', [ + PROOF.publicInputs, + PROOF.serializedProof, + PROOF.recursiveAggregationInput + ]); + await verifier.fallback({ data: calldata }); + + // Check that proof is verified + let result = await verifier.verify(PROOF.publicInputs, PROOF.serializedProof, PROOF.recursiveAggregationInput); + expect(result, 'proof verification failed').true; + }); + + describe('Should verify valid proof with fields values in non standard format', function () { + it('Public input with dirty bits over Fr mask', async () => { + let validProof = JSON.parse(JSON.stringify(PROOF)); + // Fill dirty bits + validProof.publicInputs[0] = ethers.BigNumber.from(validProof.publicInputs[0]) + .add('0xe000000000000000000000000000000000000000000000000000000000000000') + .toHexString(); + const result = await verifier.verify( + validProof.publicInputs, + validProof.serializedProof, + validProof.recursiveAggregationInput + ); + expect(result, 'proof verification failed').true; + }); + + it('Elliptic curve points over modulo', async () => { + let validProof = JSON.parse(JSON.stringify(PROOF)); + // Add modulo to points + validProof.serializedProof[0] = ethers.BigNumber.from(validProof.serializedProof[0]).add(Q_MOD); + validProof.serializedProof[1] = ethers.BigNumber.from(validProof.serializedProof[1]).add(Q_MOD).add(Q_MOD); + const result = await verifier.verify( + 
validProof.publicInputs, + validProof.serializedProof, + validProof.recursiveAggregationInput + ); + expect(result, 'proof verification failed').true; + }); + + it('Fr over modulo', async () => { + let validProof = JSON.parse(JSON.stringify(PROOF)); + // Add modulo to number + validProof.serializedProof[22] = ethers.BigNumber.from(validProof.serializedProof[22]).add(R_MOD); + const result = await verifier.verify( + validProof.publicInputs, + validProof.serializedProof, + validProof.recursiveAggregationInput + ); + expect(result, 'proof verification failed').true; + }); + }); + + describe('Should revert on invalid input', function () { + it('More than 1 public inputs', async () => { + let invalidProof = JSON.parse(JSON.stringify(PROOF)); + // Add one more public input to proof + invalidProof.publicInputs.push(invalidProof.publicInputs[0]); + const revertReason = await getCallRevertReason( + verifier.verify( + invalidProof.publicInputs, + invalidProof.serializedProof, + invalidProof.recursiveAggregationInput + ) + ); + expect(revertReason).equal('loadProof: Proof is invalid'); + }); + + it('Empty public inputs', async () => { + const revertReason = await getCallRevertReason( + verifier.verify([], PROOF.serializedProof, PROOF.recursiveAggregationInput) + ); + expect(revertReason).equal('loadProof: Proof is invalid'); + }); + + it('More than 44 words for proof', async () => { + let invalidProof = JSON.parse(JSON.stringify(PROOF)); + // Add one more "serialized proof" input + invalidProof.serializedProof.push(invalidProof.serializedProof[0]); + const revertReason = await getCallRevertReason( + verifier.verify( + invalidProof.publicInputs, + invalidProof.serializedProof, + invalidProof.recursiveAggregationInput + ) + ); + expect(revertReason).equal('loadProof: Proof is invalid'); + }); + + it('Empty serialized proof', async () => { + const revertReason = await getCallRevertReason( + verifier.verify(PROOF.publicInputs, [], PROOF.recursiveAggregationInput) + ); + 
expect(revertReason).equal('loadProof: Proof is invalid'); + }); + + it('Not empty recursive aggregation input', async () => { + let invalidProof = JSON.parse(JSON.stringify(PROOF)); + // Add one more "recursive aggregation input" value + invalidProof.recursiveAggregationInput.push(invalidProof.publicInputs[0]); + const revertReason = await getCallRevertReason( + verifier.verify( + invalidProof.publicInputs, + invalidProof.serializedProof, + invalidProof.recursiveAggregationInput + ) + ); + expect(revertReason).equal('loadProof: Proof is invalid'); + }); + + it('Elliptic curve point at infinity', async () => { + let invalidProof = JSON.parse(JSON.stringify(PROOF)); + // Change first point to point at infinity (encode as (0, 0) on EVM) + invalidProof.serializedProof[0] = ethers.constants.HashZero; + invalidProof.serializedProof[1] = ethers.constants.HashZero; + const revertReason = await getCallRevertReason( + verifier.verify( + invalidProof.publicInputs, + invalidProof.serializedProof, + invalidProof.recursiveAggregationInput + ) + ); + expect(revertReason).equal('loadProof: Proof is invalid'); + }); + }); + + it('Should failed with invalid public input', async () => { + const revertReason = await getCallRevertReason( + verifier.verify([ethers.constants.HashZero], PROOF.serializedProof, PROOF.recursiveAggregationInput) + ); + expect(revertReason).equal('invalid quotient evaluation'); + }); + + it('Should return correct Verification key hash', async () => { + const vksHash = await verifier.verificationKeyHash(); + expect(vksHash).equal('0x1400ce39763501f086ab7210ec2f020c0188cad25757ef756a7ba31fa62b1842'); + }); +}); + +describe('Verifier with recursive part test', function () { + const Q_MOD = '21888242871839275222246405745257275088696311157297823662689037894645226208583'; + const R_MOD = '21888242871839275222246405745257275088548364400416034343698204186575808495617'; + const PROOF = { publicInputs: 
['0x00461afd95c6bd5a38a01a995f5c292d19a816a139bbc78fc23321c3b8da6243'], serializedProof: [ @@ -63,10 +266,10 @@ describe('Verifier test', function () { '0x0713c1371914ac18d7dced467a8a60eeca0f3d80a2cbd5dcc75abb6cbab39f39' ] }; - let verifier: VerifierTest; + let verifier: VerifierRecursiveTest; before(async function () { - const verifierFactory = await hardhat.ethers.getContractFactory('VerifierTest'); + const verifierFactory = await hardhat.ethers.getContractFactory('VerifierRecursiveTest'); const verifierContract = await verifierFactory.deploy(); verifier = VerifierTestFactory.connect(verifierContract.address, verifierContract.signer); }); @@ -222,6 +425,6 @@ describe('Verifier test', function () { it('Should return correct Verification key hash', async () => { const vksHash = await verifier.verificationKeyHash(); - expect(vksHash).equal('0x8a50c24dacf7e4d0e8cccb618261cbc775f7b8a2a1e5e794510b10ff42a49323'); + expect(vksHash).equal('0x941b4da215420ba6a39c1c94ada871e89749bd84fdeedd079acb3f0d0e1b2acd'); }); });