From 3606fc1d8f103b4f7174301f9a985ace2b89038d Mon Sep 17 00:00:00 2001 From: Ryan Doyle Date: Fri, 8 Nov 2024 03:36:56 -0700 Subject: [PATCH 01/23] feat(vm_executor): Add new histogram metric for gas per tx in vm_executor (#3215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adds a new histogram metric representing total gas used per tx - Adds a new histogram metric representing the gas limit per failed tx ## Why ❔ - The existing metric is a histogram of rates. That's difficult to extract useful throughput numbers from, with the notable exception of efficiency. - Exporting this metric lets the consumer do their own rate/anomaly calculations on gas/tx, ratio of gas burned for failed txs, etc. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- core/lib/vm_executor/src/batch/executor.rs | 9 ++++++--- core/lib/vm_executor/src/batch/metrics.rs | 10 ++++++++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/core/lib/vm_executor/src/batch/executor.rs b/core/lib/vm_executor/src/batch/executor.rs index 6dc9354fd7db..12b0718a4a56 100644 --- a/core/lib/vm_executor/src/batch/executor.rs +++ b/core/lib/vm_executor/src/batch/executor.rs @@ -99,11 +99,13 @@ where let elapsed = latency.observe(); if !res.tx_result.result.is_failed() { - let gas_per_nanosecond = - res.tx_result.statistics.computational_gas_used as f64 / elapsed.as_nanos() as f64; + let gas_used = res.tx_result.statistics.computational_gas_used; EXECUTOR_METRICS .computational_gas_per_nanosecond - .observe(gas_per_nanosecond); + .observe(gas_used as f64 / elapsed.as_nanos() as f64); + EXECUTOR_METRICS + .computational_gas_used + .observe(gas_used.into()); } else { // The amount of computational gas paid for failed transactions is hard to get // but comparing to the gas limit makes sense, since we can burn all gas @@ -111,6 +113,7 @@ EXECUTOR_METRICS .failed_tx_gas_limit_per_nanosecond .observe(tx_gas_limit as f64 / elapsed.as_nanos() as f64); + EXECUTOR_METRICS.failed_tx_gas_limit.observe(tx_gas_limit); } Ok(res) } diff --git a/core/lib/vm_executor/src/batch/metrics.rs b/core/lib/vm_executor/src/batch/metrics.rs index 6851193e9be9..37f7997c31fd 100644 --- a/core/lib/vm_executor/src/batch/metrics.rs +++ b/core/lib/vm_executor/src/batch/metrics.rs @@ -21,6 +21,10 @@ const GAS_PER_NANOSECOND_BUCKETS: Buckets = Buckets::values(&[ 0.01, 0.03, 0.1, 0.3, 0.5, 0.75, 1., 1.5, 3., 5., 10., 20., 50., ]); +const GAS_USED_BUCKETS: Buckets = Buckets::values(&[ + 10000., 25000., 45000., 70000., 100000., 150000., 225000., 350000., 500000., +]); + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "stage", rename_all = "snake_case")] pub(super) enum TxExecutionStage { @@ -37,8 +41,14 @@ pub(super) struct ExecutorMetrics { pub batch_executor_command_response_time: Family>, #[metrics(buckets = GAS_PER_NANOSECOND_BUCKETS)] pub computational_gas_per_nanosecond: Histogram, + /// Computational gas used, per transaction. + #[metrics(buckets = GAS_USED_BUCKETS)] + pub computational_gas_used: Histogram, #[metrics(buckets = GAS_PER_NANOSECOND_BUCKETS)] pub failed_tx_gas_limit_per_nanosecond: Histogram, + /// Gas limit, per failed transaction.
+ #[metrics(buckets = GAS_USED_BUCKETS)] + pub failed_tx_gas_limit: Histogram, /// Cumulative latency of interacting with the storage when executing a transaction /// in the batch executor. #[metrics(buckets = Buckets::LATENCIES)] From e874fbc567e929a0fb24b624a594db06ffaee385 Mon Sep 17 00:00:00 2001 From: Aleksandr Stepanov Date: Fri, 8 Nov 2024 11:56:40 +0100 Subject: [PATCH 02/23] ci: Try to get setup_2\26 key from cache (#3235) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Try to get the setup_2^26 key from cache instead of downloading it from the bucket. ## Why ❔ Improve download speed. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- .github/workflows/build-tee-prover-template.yml | 1 - .github/workflows/ci-common-reusable.yml | 3 +-- .github/workflows/ci-prover-e2e.yml | 2 +- .github/workflows/new-build-contract-verifier-template.yml | 6 +++++- .github/workflows/new-build-core-template.yml | 6 +++++- .github/workflows/new-build-prover-template.yml | 1 - 6 files changed, 12 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-tee-prover-template.yml b/.github/workflows/build-tee-prover-template.yml index 0e5b80d2e3a2..c55e06931247 100644 --- a/.github/workflows/build-tee-prover-template.yml +++ b/.github/workflows/build-tee-prover-template.yml @@ -76,4 +76,3 @@ jobs: docker push "${repo}/${tag}" done done - diff --git a/.github/workflows/ci-common-reusable.yml b/.github/workflows/ci-common-reusable.yml index ea91fc4a7cd6..d57630d3029a 100644 --- a/.github/workflows/ci-common-reusable.yml +++ b/.github/workflows/ci-common-reusable.yml @@ -28,7 +28,7 @@ jobs: run: | run_retried docker-compose -f ${RUNNER_COMPOSE_FILE} pull docker-compose -f ${RUNNER_COMPOSE_FILE} up --build -d zk postgres - + - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup @@ -38,4 +38,3 @@ jobs: # `zk lint prover` = cargo clippy, which does cargo check behind the scenes, which is a lightweight version of cargo build - name: Lints run: ci_run zkstack dev lint -t rs --check - diff --git a/.github/workflows/ci-prover-e2e.yml b/.github/workflows/ci-prover-e2e.yml index b0b9caf888fc..6076874c3710 100644 --- a/.github/workflows/ci-prover-e2e.yml +++ b/.github/workflows/ci-prover-e2e.yml @@ -100,7 +100,7 @@ jobs: - name: Kill prover & start compressor run: | sudo ./bin/prover_checkers/kill_prover - + ci_run zkstack prover run --component=compressor --docker=false &>prover_logs/compressor.log & - name: Wait for batch to be executed on L1 env: diff --git a/.github/workflows/new-build-contract-verifier-template.yml b/.github/workflows/new-build-contract-verifier-template.yml index 9b23cda6f02a..7e48968a65c1 100644 --- a/.github/workflows/new-build-contract-verifier-template.yml +++ b/.github/workflows/new-build-contract-verifier-template.yml @@ -176,7 +176,11 @@ jobs: - name: Download setup key shell: bash run: | - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi - name: Set env vars shell: bash diff --git
a/.github/workflows/new-build-core-template.yml b/.github/workflows/new-build-core-template.yml index c4aeb9180fda..350d689c4572 100644 --- a/.github/workflows/new-build-core-template.yml +++ b/.github/workflows/new-build-core-template.yml @@ -187,7 +187,11 @@ jobs: - name: Download setup key shell: bash run: | - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi - name: Set env vars shell: bash diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml index 5d42696c0b2a..046711d679e8 100644 --- a/.github/workflows/new-build-prover-template.yml +++ b/.github/workflows/new-build-prover-template.yml @@ -127,7 +127,6 @@ jobs: if: matrix.components == 'proof-fri-gpu-compressor' run: | run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available # In our case it happens only when PR is created from fork - name: Wait for runner IP to be not rate-limited against GH API From 5bc70c85b1d752f69e75457729ccd07013e62e63 Mon Sep 17 00:00:00 2001 From: Aleksandr Stepanov Date: Fri, 8 Nov 2024 13:25:44 +0100 Subject: [PATCH 03/23] ci: Change schedule to monthly and add grouping to renovate (#3239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Change the renovatebot PR schedule to monthly and group changes into one PR. ## Why ❔ Avoid having a lot of PRs, and save time on merging and testing them. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- renovate.json | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/renovate.json b/renovate.json index eeccfee848dc..fd09d70ffe4b 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,13 @@ { - "extends": ["config:base", "schedule:earlyMondays","helpers:pinGitHubActionDigests"], + "extends": ["config:base", "helpers:pinGitHubActionDigests"], "enabledManagers": ["github-actions"], - "prCreation": "immediate" + "prCreation": "not-pending", + "groupName": "github actions monthly updates", + "schedule": ["monthly"], + "packageRules": [ + { + "managers": ["github-actions"], + "groupName": "all-github-actions-updates" + } + ] } From 25ec1c836eaba425c83091c48e728a77498b4e75 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 8 Nov 2024 12:26:34 +0000 Subject: [PATCH 04/23] chore(deps): update trufflesecurity/trufflehog digest to 781157a (#2881) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | trufflesecurity/trufflehog | action | digest | `0c66d30` -> `781157a` | --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied.
♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/matter-labs/zksync-era). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/secrets_scanner.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/secrets_scanner.yaml b/.github/workflows/secrets_scanner.yaml index fa896bf10561..9bb1ad0a2722 100644 --- a/.github/workflows/secrets_scanner.yaml +++ b/.github/workflows/secrets_scanner.yaml @@ -11,7 +11,7 @@ jobs: with: fetch-depth: 0 - name: TruffleHog OSS - uses: trufflesecurity/trufflehog@0c66d30c1f4075cee1aada2e1ab46dabb1b0071a + uses: trufflesecurity/trufflehog@781157ae368b2218a0a56b889387dd26faa20f97 with: path: ./ base: ${{ github.event.repository.default_branch }} From fd27507168de97b734f7cfd330e53b455a527617 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Fri, 8 Nov 2024 14:46:08 +0200 Subject: [PATCH 05/23] refactor(vm): Narrow down factory deps returned in execution result (#3220) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Returns just dynamic factory deps (= EVM bytecodes deployed in a transaction) instead of all deployed bytecodes. - Tests this functionality in `multivm` and `state_keeper`. ## Why ❔ Returning all factory deps is redundant and makes things awkward for old VM versions that don't support EVM emulation. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`.
--- Cargo.lock | 1 + core/lib/multivm/Cargo.toml | 1 + .../src/glue/types/vm/vm_block_result.rs | 14 +- .../types/vm/vm_partial_execution_result.rs | 8 +- .../glue/types/vm/vm_tx_execution_result.rs | 12 +- core/lib/multivm/src/versions/shadow/tests.rs | 17 +- .../src/versions/testonly/evm_emulator.rs | 180 +++++++++++++++- .../vm_1_4_1/implementation/execution.rs | 4 +- .../vm_1_4_2/implementation/execution.rs | 4 +- .../implementation/execution.rs | 4 +- .../src/versions/vm_fast/evm_deploy_tracer.rs | 4 +- .../versions/vm_fast/tests/evm_emulator.rs | 20 +- core/lib/multivm/src/versions/vm_fast/vm.rs | 55 +++-- .../vm_latest/implementation/execution.rs | 8 +- .../vm_latest/old_vm/oracles/decommitter.rs | 20 +- .../versions/vm_latest/tests/evm_emulator.rs | 20 +- .../vm_latest/tracers/evm_deploy_tracer.rs | 11 +- .../vm_latest/types/internals/vm_state.rs | 28 +-- .../src/versions/vm_latest/utils/mod.rs | 24 --- core/lib/multivm/src/versions/vm_latest/vm.rs | 24 ++- .../implementation/execution.rs | 4 +- .../implementation/execution.rs | 4 +- core/lib/vm_executor/src/oneshot/mock.rs | 8 +- .../src/types/outputs/execution_result.rs | 50 ++++- .../src/types/outputs/finished_l1batch.rs | 9 +- core/lib/vm_interface/src/utils/shadow.rs | 16 +- core/node/api_server/src/web3/tests/vm.rs | 5 +- core/node/state_keeper/src/io/persistence.rs | 3 +- core/node/state_keeper/src/io/tests/mod.rs | 204 ++++++++++++------ core/node/state_keeper/src/keeper.rs | 9 +- .../state_keeper/src/seal_criteria/mod.rs | 3 - core/node/state_keeper/src/testonly/mod.rs | 12 +- .../src/testonly/test_batch_executor.rs | 15 +- core/node/state_keeper/src/tests/mod.rs | 20 +- .../src/updates/l1_batch_updates.rs | 3 - .../src/updates/l2_block_updates.rs | 22 +- core/node/state_keeper/src/updates/mod.rs | 7 +- .../contracts/mock-evm/mock-evm.sol | 100 ++++++++- yarn.lock | 181 +++++++++++++++- 39 files changed, 843 insertions(+), 291 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c5cee452b1ae..65ae365e3a2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11418,6 +11418,7 @@ dependencies = [ "itertools 0.10.5", "once_cell", "pretty_assertions", + "rand 0.8.5", "test-casing", "thiserror", "tracing", diff --git a/core/lib/multivm/Cargo.toml b/core/lib/multivm/Cargo.toml index eb770bf9b57e..27130bc2720d 100644 --- a/core/lib/multivm/Cargo.toml +++ b/core/lib/multivm/Cargo.toml @@ -43,6 +43,7 @@ ethabi.workspace = true [dev-dependencies] assert_matches.workspace = true pretty_assertions.workspace = true +rand.workspace = true test-casing.workspace = true zksync_test_account.workspace = true zksync_eth_signer.workspace = true diff --git a/core/lib/multivm/src/glue/types/vm/vm_block_result.rs b/core/lib/multivm/src/glue/types/vm/vm_block_result.rs index 50bb19938fe7..c4eb0b1741aa 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_block_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_block_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use circuit_sequencer_api_1_3_3::sort_storage_access::sort_storage_access_queries as sort_storage_access_queries_1_3_3; use itertools::Itertools; use zk_evm_1_3_1::aux_structures::LogQuery as LogQuery_1_3_1; @@ -47,7 +49,7 @@ impl GlueFrom for crate::interface::Fi circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -104,7 +106,7 @@ impl GlueFrom for crate::interface::Fi circuit_statistic: Default::default(), 
}, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -160,7 +162,7 @@ impl GlueFrom for crate::interface: circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -230,7 +232,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -263,7 +265,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -312,7 +314,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } diff --git a/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs b/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs index 4c4cffcc6876..fa251116b85c 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use crate::glue::{GlueFrom, GlueInto}; impl GlueFrom @@ -22,7 +24,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -49,7 +51,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -76,7 +78,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } diff --git a/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs b/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs index 8978d4348edd..fcbcde990f37 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use crate::{ glue::{GlueFrom, GlueInto}, interface::{ @@ -66,14 +68,14 @@ impl GlueFrom VmExecutionResultAndLogs { result: ExecutionResult::Halt { reason: halt }, logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, } } @@ -102,14 +104,14 @@ impl logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, TxRevertReason::Halt(halt) => VmExecutionResultAndLogs { result: ExecutionResult::Halt { reason: halt }, logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, } } @@ -133,7 +135,7 @@ impl GlueFrom { unreachable!("Halt is the only revert reason for VM 5") diff --git a/core/lib/multivm/src/versions/shadow/tests.rs b/core/lib/multivm/src/versions/shadow/tests.rs index e6fb05e24069..4466d96a96b7 100644 --- a/core/lib/multivm/src/versions/shadow/tests.rs +++ b/core/lib/multivm/src/versions/shadow/tests.rs @@ -231,7 +231,22 @@ mod evm_emulator { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::(); 
+ test_mock_emulator_with_deployment::(false); + } + + #[test] + fn mock_emulator_with_reverted_deployment() { + test_mock_emulator_with_deployment::(true); + } + + #[test] + fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::(); + } + + #[test] + fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::(); } #[test] diff --git a/core/lib/multivm/src/versions/testonly/evm_emulator.rs b/core/lib/multivm/src/versions/testonly/evm_emulator.rs index 6de394842aaa..a77274ec581c 100644 --- a/core/lib/multivm/src/versions/testonly/evm_emulator.rs +++ b/core/lib/multivm/src/versions/testonly/evm_emulator.rs @@ -1,6 +1,8 @@ use std::collections::HashMap; +use assert_matches::assert_matches; use ethabi::Token; +use rand::{rngs::StdRng, Rng, SeedableRng}; use zksync_contracts::{load_contract, read_bytecode, SystemContractCode}; use zksync_system_constants::{ CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L2_BASE_TOKEN_ADDRESS, @@ -18,7 +20,8 @@ use zksync_utils::{ use super::{default_system_env, TestedVm, VmTester, VmTesterBuilder}; use crate::interface::{ - storage::InMemoryStorage, TxExecutionMode, VmExecutionResultAndLogs, VmInterfaceExt, + storage::InMemoryStorage, ExecutionResult, TxExecutionMode, VmExecutionResultAndLogs, + VmInterfaceExt, }; const MOCK_DEPLOYER_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/MockContractDeployer.json"; @@ -146,11 +149,26 @@ pub(crate) fn test_tracing_evm_contract_deployment() { .execute_transaction_with_bytecode_compression(deploy_tx, true); assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); - let new_known_factory_deps = vm_result.new_known_factory_deps.unwrap(); - assert_eq!(new_known_factory_deps.len(), 2); // the deployed EraVM contract + EVM contract + // The EraVM contract also deployed in a transaction should be filtered out assert_eq!( - new_known_factory_deps[&expected_bytecode_hash], - evm_bytecode + vm_result.dynamic_factory_deps, + HashMap::from([(expected_bytecode_hash, evm_bytecode)]) + ); + + // "Deploy" a bytecode in another transaction and check that the first tx doesn't interfere with the returned `dynamic_factory_deps`. 
+ let args = [Token::Bytes((0..32).rev().collect())]; + let evm_bytecode = ethabi::encode(&args); + let expected_bytecode_hash = hash_evm_bytecode(&evm_bytecode); + let execute = Execute::for_deploy(expected_bytecode_hash, vec![0; 32], &args); + let deploy_tx = account.get_l2_tx_for_execute(execute, None); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(deploy_tx, true); + assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); + + assert_eq!( + vm_result.dynamic_factory_deps, + HashMap::from([(expected_bytecode_hash, evm_bytecode)]) ); } @@ -310,7 +328,7 @@ pub(crate) fn test_calling_to_mock_emulator_from_native_contract() assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); } -pub(crate) fn test_mock_emulator_with_deployment() { +pub(crate) fn test_mock_emulator_with_deployment(revert: bool) { let contract_address = Address::repeat_byte(0xaa); let mut vm = EvmTestBuilder::new(true, contract_address) .with_mock_deployer() @@ -329,6 +347,7 @@ pub(crate) fn test_mock_emulator_with_deployment() { .encode_input(&[ Token::FixedBytes(new_evm_bytecode_hash.0.into()), Token::Bytes(new_evm_bytecode.clone()), + Token::Bool(revert), ]) .unwrap(), value: 0.into(), @@ -336,16 +355,159 @@ pub(crate) fn test_mock_emulator_with_deployment() { }, None, ); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + + assert_eq!(vm_result.result.is_failed(), revert, "{vm_result:?}"); + let expected_dynamic_deps = if revert { + HashMap::new() + } else { + HashMap::from([(new_evm_bytecode_hash, new_evm_bytecode)]) + }; + assert_eq!(vm_result.dynamic_factory_deps, expected_dynamic_deps); + + // Test that a following transaction can decommit / call EVM contracts deployed in the previous transaction. 
+ let test_fn = mock_emulator_abi + .function("testCallToPreviousDeployment") + .unwrap(); + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn.encode_input(&[]).unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + + if revert { + assert_matches!( + &vm_result.result, + ExecutionResult::Revert { output } + if output.to_string().contains("contract code length") + ); + } else { + assert!(!vm_result.result.is_failed(), "{vm_result:?}"); + } + assert!(vm_result.dynamic_factory_deps.is_empty(), "{vm_result:?}"); +} + +fn encode_deployment(hash: H256, bytecode: Vec) -> Token { + assert_eq!(bytecode.len(), 32); + Token::Tuple(vec![ + Token::FixedBytes(hash.0.to_vec()), + Token::FixedBytes(bytecode), + ]) +} + +pub(crate) fn test_mock_emulator_with_recursive_deployment() { + let contract_address = Address::repeat_byte(0xaa); + let mut vm = EvmTestBuilder::new(true, contract_address) + .with_mock_deployer() + .build::(); + let account = &mut vm.rich_accounts[0]; + + let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let bytecodes: HashMap<_, _> = (0_u8..10) + .map(|byte| { + let bytecode = vec![byte; 32]; + (hash_evm_bytecode(&bytecode), bytecode) + }) + .collect(); + let test_fn = mock_emulator_abi + .function("testRecursiveDeployment") + .unwrap(); + let deployments: Vec<_> = bytecodes + .iter() + .map(|(hash, code)| encode_deployment(*hash, code.clone())) + .collect(); + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn.encode_input(&[Token::Array(deployments)]).unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + assert!(!vm_result.result.is_failed(), "{vm_result:?}"); + assert_eq!(vm_result.dynamic_factory_deps, bytecodes); +} + +pub(crate) fn test_mock_emulator_with_partial_reverts() { + for seed in [1, 10, 100, 1_000] { + println!("Testing with RNG seed {seed}"); + let mut rng = StdRng::seed_from_u64(seed); + test_mock_emulator_with_partial_reverts_and_rng::(&mut rng); + } +} + +fn test_mock_emulator_with_partial_reverts_and_rng(rng: &mut impl Rng) { + let contract_address = Address::repeat_byte(0xaa); + let mut vm = EvmTestBuilder::new(true, contract_address) + .with_mock_deployer() + .build::(); + let account = &mut vm.rich_accounts[0]; + + let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let all_bytecodes: HashMap<_, _> = (0_u8..10) + .map(|_| { + let bytecode = vec![rng.gen(); 32]; + (hash_evm_bytecode(&bytecode), bytecode) + }) + .collect(); + let should_revert: Vec<_> = (0..10).map(|_| rng.gen::()).collect(); + + let test_fn = mock_emulator_abi + .function("testDeploymentWithPartialRevert") + .unwrap(); + let deployments: Vec<_> = all_bytecodes + .iter() + .map(|(hash, code)| encode_deployment(*hash, code.clone())) + .collect(); + let revert_tokens: Vec<_> = should_revert.iter().copied().map(Token::Bool).collect(); + + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn + .encode_input(&[Token::Array(deployments), Token::Array(revert_tokens)]) + .unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + let (_, vm_result) = vm .vm .execute_transaction_with_bytecode_compression(test_tx, true); 
assert!(!vm_result.result.is_failed(), "{vm_result:?}"); - let factory_deps = vm_result.new_known_factory_deps.unwrap(); + let dynamic_deps = &vm_result.dynamic_factory_deps; assert_eq!( - factory_deps, - HashMap::from([(new_evm_bytecode_hash, new_evm_bytecode)]) + dynamic_deps.len(), + should_revert + .iter() + .map(|flag| !flag as usize) + .sum::(), + "{dynamic_deps:?}" ); + for ((bytecode_hash, bytecode), &should_revert) in all_bytecodes.iter().zip(&should_revert) { + assert_eq!( + dynamic_deps.get(bytecode_hash), + (!should_revert).then_some(bytecode), + "hash={bytecode_hash:?}, deps={dynamic_deps:?}" + ); + } } pub(crate) fn test_mock_emulator_with_delegate_call() { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs b/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs index cc199fef9416..35ff73071ca6 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_4_1::aux_structures::Timestamp; @@ -99,7 +99,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs b/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs index f6e49cd8b149..341584168be4 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use zk_evm_1_4_1::aux_structures::Timestamp; use crate::{ @@ -96,7 +98,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs index b8b939f86731..e942f0fc4245 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_4_0::aux_structures::Timestamp; @@ -93,7 +93,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs index 1bc0ff5134f1..62aba8df5b9b 100644 --- a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs @@ -16,8 +16,8 @@ use super::utils::read_fat_pointer; pub(super) struct DynamicBytecodes(Rc>>>); impl DynamicBytecodes { - pub(super) fn take(&self, hash: U256) -> Option> { - self.0.borrow_mut().remove(&hash) + pub(super) fn map(&self, hash: U256, f: impl FnOnce(&[u8]) -> R) -> Option { + self.0.borrow().get(&hash).map(|code| f(code)) } fn insert(&self, hash: U256, bytecode: Vec) { diff --git a/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs b/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs index cb7d54dba29f..7b5ea3e4447b 100644 --- 
a/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs @@ -4,7 +4,8 @@ use crate::{ versions::testonly::evm_emulator::{ test_calling_to_mock_emulator_from_native_contract, test_mock_emulator_basics, test_mock_emulator_with_delegate_call, test_mock_emulator_with_deployment, - test_mock_emulator_with_payment, test_mock_emulator_with_recursion, + test_mock_emulator_with_partial_reverts, test_mock_emulator_with_payment, + test_mock_emulator_with_recursion, test_mock_emulator_with_recursive_deployment, test_mock_emulator_with_static_call, test_tracing_evm_contract_deployment, }, vm_fast::Vm, @@ -39,7 +40,22 @@ fn calling_to_mock_emulator_from_native_contract() { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::>(); + test_mock_emulator_with_deployment::>(false); +} + +#[test] +fn mock_emulator_with_reverted_deployment() { + test_mock_emulator_with_deployment::>(false); +} + +#[test] +fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::>(); +} + +#[test] +fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::>(); } #[test] diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index f90bac149d0f..d18f7b91f323 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -55,7 +55,6 @@ use crate::{ get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, }, - utils::extract_bytecodes_marked_as_known, MultiVMSubversion, }, }; @@ -653,8 +652,10 @@ impl Vm { // We need to filter out bytecodes the deployment of which may have been reverted; the tracer is not aware of reverts. // To do this, we check bytecodes against deployer events. - let factory_deps_marked_as_known = extract_bytecodes_marked_as_known(&logs.events); - let new_known_factory_deps = self.world.decommit_bytecodes(&factory_deps_marked_as_known); + let factory_deps_marked_as_known = VmEvent::extract_bytecodes_marked_as_known(&logs.events); + let dynamic_factory_deps = self + .world + .decommit_dynamic_bytecodes(factory_deps_marked_as_known); VmExecutionResultAndLogs { result: result.execution_result, @@ -671,7 +672,7 @@ impl Vm { total_log_queries: 0, }, refunds: result.refunds, - new_known_factory_deps: Some(new_known_factory_deps), + dynamic_factory_deps, } } } @@ -848,16 +849,15 @@ impl World { ) } - fn decommit_bytecodes(&self, hashes: &[H256]) -> HashMap> { - let bytecodes = hashes.iter().map(|&hash| { - let int_hash = h256_to_u256(hash); + fn decommit_dynamic_bytecodes( + &self, + candidate_hashes: impl Iterator, + ) -> HashMap> { + let bytecodes = candidate_hashes.filter_map(|hash| { let bytecode = self - .bytecode_cache - .get(&int_hash) - .cloned() - .or_else(|| self.dynamic_bytecodes.take(int_hash)) - .unwrap_or_else(|| panic!("Bytecode with hash {hash:?} not found")); - (hash, bytecode) + .dynamic_bytecodes + .map(h256_to_u256(hash), <[u8]>::to_vec)?; + Some((hash, bytecode)) }); bytecodes.collect() } @@ -933,17 +933,28 @@ impl zksync_vm2::World for World { self.program_cache .entry(hash) .or_insert_with(|| { - let bytecode = self.bytecode_cache.entry(hash).or_insert_with(|| { - // Since we put the bytecode in the cache anyway, it's safe to *take* it out from `dynamic_bytecodes` - // and put it in `bytecode_cache`. 
- self.dynamic_bytecodes - .take(hash) - .or_else(|| self.storage.load_factory_dep(u256_to_h256(hash))) + let cached = self + .bytecode_cache + .get(&hash) + .map(|code| Program::new(code, false)) + .or_else(|| { + self.dynamic_bytecodes + .map(hash, |code| Program::new(code, false)) + }); + + if let Some(cached) = cached { + cached + } else { + let code = self + .storage + .load_factory_dep(u256_to_h256(hash)) .unwrap_or_else(|| { panic!("VM tried to decommit nonexistent bytecode: {hash:?}"); - }) - }); - Program::new(bytecode, false) + }); + let program = Program::new(&code, false); + self.bytecode_cache.insert(hash, code); + program + } }) .clone() } diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs b/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs index d9331720ce28..f8acfaec4259 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs @@ -1,6 +1,7 @@ use std::mem; use zk_evm_1_5_0::aux_structures::Timestamp; +use zksync_vm_interface::VmEvent; use crate::{ interface::{ @@ -14,7 +15,6 @@ use crate::{ circuits_capacity::circuit_statistic_from_cycles, dispatcher::TracerDispatcher, DefaultExecutionTracer, PubdataTracer, RefundsTracer, }, - utils::extract_bytecodes_marked_as_known, vm::Vm, }, HistoryMode, @@ -101,8 +101,8 @@ impl Vm { circuit_statistic_from_cycles(tx_tracer.circuits_tracer.statistics), ); let result = tx_tracer.result_tracer.into_result(); - let factory_deps_marked_as_known = extract_bytecodes_marked_as_known(&logs.events); - let new_known_factory_deps = self.decommit_bytecodes(&factory_deps_marked_as_known); + let factory_deps_marked_as_known = VmEvent::extract_bytecodes_marked_as_known(&logs.events); + let dynamic_factory_deps = self.decommit_dynamic_bytecodes(factory_deps_marked_as_known); *dispatcher = tx_tracer.dispatcher; let result = VmExecutionResultAndLogs { @@ -110,7 +110,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: Some(new_known_factory_deps), + dynamic_factory_deps, }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs index d91fbfdb24df..507e3d8c7598 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs @@ -1,4 +1,7 @@ -use std::{collections::HashMap, fmt::Debug}; +use std::{ + collections::{HashMap, HashSet}, + fmt::Debug, +}; use zk_evm_1_5_0::{ abstractions::{DecommittmentProcessor, Memory, MemoryType}, @@ -27,6 +30,9 @@ pub struct DecommitterOracle { /// The cache of bytecodes that the bootloader "knows", but that are not necessarily in the database. /// And it is also used as a database cache. pub known_bytecodes: HistoryRecorder>, H>, + /// Subset of `known_bytecodes` that are dynamically deployed during VM execution. Currently, + /// only EVM bytecodes can be deployed like that. + pub dynamic_bytecode_hashes: HashSet, /// Stores pages of memory where certain code hashes have already been decommitted. /// It is expected that they all are present in the DB. 
// `decommitted_code_hashes` history is necessary @@ -40,6 +46,7 @@ impl DecommitterOracle { Self { storage, known_bytecodes: HistoryRecorder::default(), + dynamic_bytecode_hashes: HashSet::default(), decommitted_code_hashes: HistoryRecorder::default(), decommitment_requests: HistoryRecorder::default(), } @@ -76,6 +83,17 @@ impl DecommitterOracle { } } + pub fn insert_dynamic_bytecode( + &mut self, + bytecode_hash: U256, + bytecode: Vec, + timestamp: Timestamp, + ) { + self.dynamic_bytecode_hashes.insert(bytecode_hash); + self.known_bytecodes + .insert(bytecode_hash, bytecode, timestamp); + } + pub fn get_decommitted_bytecodes_after_timestamp(&self, timestamp: Timestamp) -> usize { // Note, that here we rely on the fact that for each used bytecode // there is one and only one corresponding event in the history of it. diff --git a/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs b/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs index b9b96c670983..5b6e24eefbf0 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs @@ -4,7 +4,8 @@ use crate::{ versions::testonly::evm_emulator::{ test_calling_to_mock_emulator_from_native_contract, test_mock_emulator_basics, test_mock_emulator_with_delegate_call, test_mock_emulator_with_deployment, - test_mock_emulator_with_payment, test_mock_emulator_with_recursion, + test_mock_emulator_with_partial_reverts, test_mock_emulator_with_payment, + test_mock_emulator_with_recursion, test_mock_emulator_with_recursive_deployment, test_mock_emulator_with_static_call, test_tracing_evm_contract_deployment, }, vm_latest::{HistoryEnabled, Vm}, @@ -39,7 +40,22 @@ fn calling_to_mock_emulator_from_native_contract() { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::>(); + test_mock_emulator_with_deployment::>(false); +} + +#[test] +fn mock_emulator_with_reverted_deployment() { + test_mock_emulator_with_deployment::>(true); +} + +#[test] +fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::>(); +} + +#[test] +fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::>(); } #[test] diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs index becc4f225276..61c8ef0b5abf 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs @@ -89,14 +89,13 @@ impl VmTracer for EvmDeployTracer { state: &mut ZkSyncVmState, _bootloader_state: &mut BootloaderState, ) -> TracerExecutionStatus { + let timestamp = Timestamp(state.local_state.timestamp); for published_bytecode in mem::take(&mut self.pending_bytecodes) { - let hash = hash_evm_bytecode(&published_bytecode); + let hash = h256_to_u256(hash_evm_bytecode(&published_bytecode)); let as_words = bytes_to_be_words(published_bytecode); - - state.decommittment_processor.populate( - vec![(h256_to_u256(hash), as_words)], - Timestamp(state.local_state.timestamp), - ); + state + .decommittment_processor + .insert_dynamic_bytecode(hash, as_words, timestamp); } TracerExecutionStatus::Continue } diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs index d25f66361f1b..90bb0c610e2c 100644 --- 
a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs @@ -83,27 +83,21 @@ pub(crate) fn new_vm_state( let mut memory = SimpleMemory::default(); let event_sink = InMemoryEventSink::default(); let precompiles_processor = PrecompilesProcessorWithHistory::::default(); + let mut decommittment_processor: DecommitterOracle = DecommitterOracle::new(storage); - - decommittment_processor.populate( - vec![( - h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), - )], - Timestamp(0), - ); - + let mut initial_bytecodes = vec![( + h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), + system_env + .base_system_smart_contracts + .default_aa + .code + .clone(), + )]; if let Some(evm_emulator) = &system_env.base_system_smart_contracts.evm_emulator { - decommittment_processor.populate( - vec![(h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())], - Timestamp(0), - ); + initial_bytecodes.push((h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())); } + decommittment_processor.populate(initial_bytecodes, Timestamp(0)); memory.populate( vec![( diff --git a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs index aeb66755f514..97483633bc54 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs @@ -1,9 +1,6 @@ //! Utility functions for the VM. -use once_cell::sync::Lazy; use zk_evm_1_5_0::aux_structures::MemoryPage; -use zksync_types::{H256, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_vm_interface::VmEvent; pub mod fee; pub mod l2_blocks; @@ -14,24 +11,3 @@ pub mod transaction_encoding; pub const fn heap_page_from_base(base: MemoryPage) -> MemoryPage { MemoryPage(base.0 + 2) } - -/// Extracts all bytecodes marked as known on the system contracts. -pub fn extract_bytecodes_marked_as_known(all_generated_events: &[VmEvent]) -> Vec { - static PUBLISHED_BYTECODE_SIGNATURE: Lazy = Lazy::new(|| { - ethabi::long_signature( - "MarkedAsKnown", - &[ethabi::ParamType::FixedBytes(32), ethabi::ParamType::Bool], - ) - }); - - all_generated_events - .iter() - .filter(|event| { - // Filter events from the deployer contract that match the expected signature. 
- event.address == KNOWN_CODES_STORAGE_ADDRESS - && event.indexed_topics.len() == 3 - && event.indexed_topics[0] == *PUBLISHED_BYTECODE_SIGNATURE - }) - .map(|event| event.indexed_topics[1]) - .collect() -} diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index ef6cee454a87..ff90eb14ee42 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -85,16 +85,24 @@ impl Vm { self.state.local_state.callstack.current.ergs_remaining } - pub(crate) fn decommit_bytecodes(&self, hashes: &[H256]) -> HashMap> { - let bytecodes = hashes.iter().map(|&hash| { - let bytecode_words = self - .state - .decommittment_processor + pub(crate) fn decommit_dynamic_bytecodes( + &self, + candidate_hashes: impl Iterator, + ) -> HashMap> { + let decommitter = &self.state.decommittment_processor; + let bytecodes = candidate_hashes.filter_map(|hash| { + let int_hash = h256_to_u256(hash); + if !decommitter.dynamic_bytecode_hashes.contains(&int_hash) { + return None; + } + let bytecode = decommitter .known_bytecodes .inner() - .get(&h256_to_u256(hash)) - .unwrap_or_else(|| panic!("Bytecode with hash {hash:?} not found")); - (hash, be_words_to_bytes(bytecode_words)) + .get(&int_hash) + .unwrap_or_else(|| { + panic!("Bytecode with hash {hash:?} not found"); + }); + Some((hash, be_words_to_bytes(bytecode))) }); bytecodes.collect() } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs index 9462a89be2ab..e8d19dfbba97 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_3_3::aux_structures::Timestamp; @@ -90,7 +90,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs index b1ad4d257b77..d3d511ed5398 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_3_3::aux_structures::Timestamp; @@ -88,7 +88,7 @@ impl Vm { .refund_tracer .map(|r| r.get_refunds()) .unwrap_or_default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; tx_tracer.dispatcher.save_results(&mut result); diff --git a/core/lib/vm_executor/src/oneshot/mock.rs b/core/lib/vm_executor/src/oneshot/mock.rs index e211328b5eca..89eaf3c75e29 100644 --- a/core/lib/vm_executor/src/oneshot/mock.rs +++ b/core/lib/vm_executor/src/oneshot/mock.rs @@ -74,13 +74,7 @@ impl MockOneshotExecutor { { Box::new( move |tx: &Transaction, env: &OneshotEnv| -> VmExecutionResultAndLogs { - VmExecutionResultAndLogs { - result: responses(tx, env), - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - } + VmExecutionResultAndLogs::mock(responses(tx, env)) }, ) } diff --git 
a/core/lib/vm_interface/src/types/outputs/execution_result.rs b/core/lib/vm_interface/src/types/outputs/execution_result.rs index 018ea075db51..9bb784fbf71c 100644 --- a/core/lib/vm_interface/src/types/outputs/execution_result.rs +++ b/core/lib/vm_interface/src/types/outputs/execution_result.rs @@ -21,10 +21,6 @@ const L1_MESSAGE_EVENT_SIGNATURE: H256 = H256([ 58, 54, 228, 114, 145, 244, 32, 31, 175, 19, 127, 171, 8, 29, 146, 41, 91, 206, 45, 83, 190, 44, 108, 166, 139, 168, 44, 127, 170, 156, 226, 65, ]); -const PUBLISHED_BYTECODE_SIGNATURE: H256 = H256([ - 201, 71, 34, 255, 19, 234, 207, 83, 84, 124, 71, 65, 218, 181, 34, 131, 83, 160, 89, 56, 255, - 205, 213, 212, 162, 213, 51, 174, 14, 97, 130, 135, -]); pub fn bytecode_len_in_bytes(bytecodehash: H256) -> usize { usize::from(u16::from_be_bytes([bytecodehash[2], bytecodehash[3]])) * 32 @@ -50,6 +46,11 @@ impl VmEvent { 72, 13, 60, 159, 114, 123, 94, 92, 18, 3, 212, 198, 31, 177, 133, 211, 127, 8, 230, 178, 220, 94, 155, 191, 152, 89, 27, 26, 122, 221, 245, 124, ]); + /// Long signature of the known bytecodes storage bytecode publication event (`MarkedAsKnown`). + pub const PUBLISHED_BYTECODE_SIGNATURE: H256 = H256([ + 201, 71, 34, 255, 19, 234, 207, 83, 84, 124, 71, 65, 218, 181, 34, 131, 83, 160, 89, 56, + 255, 205, 213, 212, 162, 213, 51, 174, 14, 97, 130, 135, + ]); /// Extracts all the "long" L2->L1 messages that were submitted by the L1Messenger contract. pub fn extract_long_l2_to_l1_messages(events: &[Self]) -> Vec> { @@ -79,12 +80,25 @@ impl VmEvent { // Filter events from the deployer contract that match the expected signature. event.address == KNOWN_CODES_STORAGE_ADDRESS && event.indexed_topics.len() == 3 - && event.indexed_topics[0] == PUBLISHED_BYTECODE_SIGNATURE + && event.indexed_topics[0] == Self::PUBLISHED_BYTECODE_SIGNATURE && event.indexed_topics[2] != H256::zero() }) .map(|event| event.indexed_topics[1]) .collect() } + + /// Extracts all bytecodes marked as known on the system contracts. + pub fn extract_bytecodes_marked_as_known(events: &[Self]) -> impl Iterator + '_ { + events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. + event.address == KNOWN_CODES_STORAGE_ADDRESS + && event.indexed_topics.len() == 3 + && event.indexed_topics[0] == Self::PUBLISHED_BYTECODE_SIGNATURE + }) + .map(|event| event.indexed_topics[1]) + } } /// Refunds produced for the user. @@ -120,10 +134,10 @@ pub struct VmExecutionResultAndLogs { pub logs: VmExecutionLogs, pub statistics: VmExecutionStatistics, pub refunds: Refunds, - /// Bytecodes decommitted during VM execution. `None` if not computed by the VM. - // FIXME: currently, this is only filled up by `vm_latest`; probably makes sense to narrow down - // to *dynamic* factory deps, so that `HashMap::new()` is a valid value for VMs not supporting EVM emulation. - pub new_known_factory_deps: Option>>, + /// Dynamic bytecodes decommitted during VM execution (i.e., not present in the storage at the start of VM execution + /// or in `factory_deps` fields of executed transactions). Currently, the only kind of such codes are EVM bytecodes. + /// Correspondingly, they may only be present if supported by the VM version, and if the VM is initialized with the EVM emulator base system contract. + pub dynamic_factory_deps: HashMap>, } #[derive(Debug, Clone, PartialEq)] @@ -144,6 +158,22 @@ impl ExecutionResult { } impl VmExecutionResultAndLogs { + /// Creates a mock full result based on the provided base result. 
+ pub fn mock(result: ExecutionResult) -> Self { + Self { + result, + logs: VmExecutionLogs::default(), + statistics: VmExecutionStatistics::default(), + refunds: Refunds::default(), + dynamic_factory_deps: HashMap::new(), + } + } + + /// Creates a mock successful result with no payload. + pub fn mock_success() -> Self { + Self::mock(ExecutionResult::Success { output: vec![] }) + } + pub fn get_execution_metrics(&self, tx: Option<&Transaction>) -> VmExecutionMetrics { let contracts_deployed = tx .map(|tx| tx.execute.factory_deps.len() as u16) @@ -414,6 +444,6 @@ mod tests { "MarkedAsKnown", &[ethabi::ParamType::FixedBytes(32), ethabi::ParamType::Bool], ); - assert_eq!(PUBLISHED_BYTECODE_SIGNATURE, expected_signature); + assert_eq!(VmEvent::PUBLISHED_BYTECODE_SIGNATURE, expected_signature); } } diff --git a/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs b/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs index 8f7c1d4fb0d6..7e90d425ab15 100644 --- a/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs +++ b/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs @@ -1,7 +1,6 @@ use zksync_types::writes::StateDiffRecord; use super::{BootloaderMemory, CurrentExecutionState, VmExecutionResultAndLogs}; -use crate::{ExecutionResult, Refunds, VmExecutionLogs, VmExecutionStatistics}; /// State of the VM after the batch execution. #[derive(Debug, Clone)] @@ -21,13 +20,7 @@ pub struct FinishedL1Batch { impl FinishedL1Batch { pub fn mock() -> Self { FinishedL1Batch { - block_tip_execution_result: VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, - logs: VmExecutionLogs::default(), - statistics: VmExecutionStatistics::default(), - refunds: Refunds::default(), - new_known_factory_deps: None, - }, + block_tip_execution_result: VmExecutionResultAndLogs::mock_success(), final_execution_state: CurrentExecutionState { events: vec![], deduplicated_storage_logs: vec![], diff --git a/core/lib/vm_interface/src/utils/shadow.rs b/core/lib/vm_interface/src/utils/shadow.rs index 060c04298547..0883971f4de8 100644 --- a/core/lib/vm_interface/src/utils/shadow.rs +++ b/core/lib/vm_interface/src/utils/shadow.rs @@ -190,15 +190,13 @@ impl CheckDivergence for VmExecutionResultAndLogs { &other.statistics.computational_gas_used, ); - if let (Some(these_deps), Some(other_deps)) = - (&self.new_known_factory_deps, &other.new_known_factory_deps) - { - // Order deps to have a more reasonable diff on a mismatch - let these_deps = these_deps.iter().collect::>(); - let other_deps = other_deps.iter().collect::>(); - errors.check_match("new_known_factory_deps", &these_deps, &other_deps); - } - + // Order deps to have a more reasonable diff on a mismatch + let these_deps = self.dynamic_factory_deps.iter().collect::>(); + let other_deps = other + .dynamic_factory_deps + .iter() + .collect::>(); + errors.check_match("dynamic_factory_deps", &these_deps, &other_deps); errors } } diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 7dd0164198a1..4e0426de7bfa 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -638,11 +638,8 @@ impl HttpTest for SendTransactionWithDetailedOutputTest { assert_eq!(env.l1_batch.first_l2_block.number, 1); VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, logs: vm_execution_logs.clone(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, + 
..VmExecutionResultAndLogs::mock_success() } }); tx_executor diff --git a/core/node/state_keeper/src/io/persistence.rs b/core/node/state_keeper/src/io/persistence.rs index 06f1972a02aa..8bfd812c8a1f 100644 --- a/core/node/state_keeper/src/io/persistence.rs +++ b/core/node/state_keeper/src/io/persistence.rs @@ -379,7 +379,7 @@ impl StateKeeperOutputHandler for TreeWritesPersistence { #[cfg(test)] mod tests { - use std::collections::{HashMap, HashSet}; + use std::collections::HashSet; use assert_matches::assert_matches; use futures::FutureExt; @@ -510,7 +510,6 @@ mod tests { tx, tx_result, vec![], - HashMap::new(), BlockGasCount::default(), VmExecutionMetrics::default(), vec![], diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index adef238fe928..7196236475df 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -1,7 +1,4 @@ -use std::{ - collections::HashMap, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; use test_casing::test_casing; use zksync_contracts::BaseSystemContractsHashes; @@ -14,15 +11,19 @@ use zksync_multivm::{ utils::derive_base_fee_and_gas_per_pubdata, }; use zksync_node_test_utils::prepare_recovery_snapshot; +use zksync_system_constants::KNOWN_CODES_STORAGE_ADDRESS; use zksync_types::{ block::{BlockGasCount, L2BlockHasher}, - commitment::L1BatchCommitmentMode, + commitment::{L1BatchCommitmentMode, PubdataParams}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, l2::L2Tx, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; -use zksync_utils::time::seconds_since_epoch; +use zksync_utils::{ + bytecode::{hash_bytecode, hash_evm_bytecode}, + time::seconds_since_epoch, +}; use self::tester::Tester; use crate::{ @@ -229,6 +230,29 @@ async fn l1_batch_timestamp_respects_prev_l2_block_with_clock_skew( test_timestamps_are_distinct(connection_pool, current_timestamp + 2, true, tester).await; } +fn create_block_seal_command( + l1_batch_number: L1BatchNumber, + l2_block: L2BlockUpdates, +) -> L2BlockSealCommand { + L2BlockSealCommand { + l1_batch_number, + l2_block, + first_tx_index: 0, + fee_account_address: Address::repeat_byte(0x23), + fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { + l1_gas_price: 100, + fair_l2_gas_price: 100, + fair_pubdata_price: 100, + }), + base_fee_per_gas: 10, + base_system_contracts_hashes: BaseSystemContractsHashes::default(), + protocol_version: Some(ProtocolVersionId::latest()), + l2_legacy_shared_bridge_addr: Some(Address::default()), + pre_insert_txs: false, + pubdata_params: PubdataParams::default(), + } +} + #[tokio::test] async fn processing_storage_logs_when_sealing_l2_block() { let connection_pool = @@ -261,7 +285,6 @@ async fn processing_storage_logs_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); @@ -280,28 +303,11 @@ async fn processing_storage_logs_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); let l1_batch_number = L1BatchNumber(2); - let seal_command = L2BlockSealCommand { - l1_batch_number, - l2_block, - first_tx_index: 0, - fee_account_address: Address::repeat_byte(0x23), - fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { - l1_gas_price: 100, - 
fair_l2_gas_price: 100, - fair_pubdata_price: 100, - }), - base_fee_per_gas: 10, - base_system_contracts_hashes: BaseSystemContractsHashes::default(), - protocol_version: Some(ProtocolVersionId::latest()), - l2_legacy_shared_bridge_addr: Some(Address::default()), - pre_insert_txs: false, - pubdata_params: Default::default(), - }; + let seal_command = create_block_seal_command(l1_batch_number, l2_block); connection_pool .connection() .await @@ -371,28 +377,11 @@ async fn processing_events_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); } - let seal_command = L2BlockSealCommand { - l1_batch_number, - l2_block, - first_tx_index: 0, - fee_account_address: Address::repeat_byte(0x23), - fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { - l1_gas_price: 100, - fair_l2_gas_price: 100, - fair_pubdata_price: 100, - }), - base_fee_per_gas: 10, - base_system_contracts_hashes: BaseSystemContractsHashes::default(), - protocol_version: Some(ProtocolVersionId::latest()), - l2_legacy_shared_bridge_addr: Some(Address::default()), - pre_insert_txs: false, - pubdata_params: Default::default(), - }; + let seal_command = create_block_seal_command(l1_batch_number, l2_block); pool.connection() .await .unwrap() @@ -416,6 +405,114 @@ async fn processing_events_when_sealing_l2_block() { } } +fn bytecode_publishing_events( + l1_batch_number: L1BatchNumber, + tx_index: u32, + bytecode_hashes: impl Iterator, +) -> Vec { + bytecode_hashes + .map(|bytecode_hash| VmEvent { + location: (l1_batch_number, tx_index), + address: KNOWN_CODES_STORAGE_ADDRESS, + indexed_topics: vec![ + VmEvent::PUBLISHED_BYTECODE_SIGNATURE, + bytecode_hash, + H256::from_low_u64_be(1), // sentBytecodeToL1 + ], + value: vec![], + }) + .collect() +} + +#[tokio::test] +async fn processing_dynamic_factory_deps_when_sealing_l2_block() { + let pool = + ConnectionPool::::constrained_test_pool(L2BlockSealProcess::subtasks_len()).await; + let l1_batch_number = L1BatchNumber(2); + let l2_block_number = L2BlockNumber(3); + let mut l2_block = L2BlockUpdates::new( + 0, + l2_block_number, + H256::zero(), + 1, + ProtocolVersionId::latest(), + ); + + let static_factory_deps: Vec<_> = (0_u8..10) + .map(|byte| { + let era_bytecode = vec![byte; 32]; + (hash_bytecode(&era_bytecode), era_bytecode) + }) + .collect(); + let dynamic_factory_deps: Vec<_> = (0_u8..10) + .map(|byte| { + let evm_bytecode = vec![byte; 96]; + (hash_evm_bytecode(&evm_bytecode), evm_bytecode) + }) + .collect(); + let mut all_factory_deps = static_factory_deps.clone(); + all_factory_deps.extend_from_slice(&dynamic_factory_deps); + + let events = bytecode_publishing_events( + l1_batch_number, + 0, + static_factory_deps + .iter() + .chain(&dynamic_factory_deps) + .map(|(hash, _)| *hash), + ); + + let mut tx = create_transaction(10, 100); + tx.execute.factory_deps = static_factory_deps + .into_iter() + .map(|(_, bytecode)| bytecode) + .collect(); + let mut execution_result = create_execution_result([]); + execution_result.dynamic_factory_deps = dynamic_factory_deps.into_iter().collect(); + execution_result.logs.events = events; + l2_block.extend_from_executed_transaction( + tx, + execution_result, + BlockGasCount::default(), + VmExecutionMetrics::default(), + vec![], + vec![], + ); + + assert_eq!( + l2_block.new_factory_deps.len(), + all_factory_deps.len(), + "{:?}", + l2_block.new_factory_deps + ); + for (hash, bytecode) in &all_factory_deps { + assert_eq!( + l2_block.new_factory_deps.get(hash), 
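// --- Editor's note, not part of the patch: an illustrative sketch only. ---
// `bytecode_publishing_events` above builds the events the bootloader emits when a bytecode
// is marked as known. The inverse operation (recovering published bytecode hashes from a
// transaction's events) is what `VmEvent::extract_bytecodes_marked_as_known`, used further
// down in this patch, provides. A rough, hypothetical stand-in could look as follows; the
// real implementation in `zksync_multivm` may differ.
use zksync_multivm::interface::VmEvent;
use zksync_system_constants::KNOWN_CODES_STORAGE_ADDRESS;
use zksync_types::H256;

fn published_bytecode_hashes<'a>(events: impl Iterator<Item = &'a VmEvent>) -> Vec<H256> {
    events
        .filter(|event| {
            event.address == KNOWN_CODES_STORAGE_ADDRESS
                && event.indexed_topics.first() == Some(&VmEvent::PUBLISHED_BYTECODE_SIGNATURE)
        })
        // The second indexed topic carries the published bytecode hash (see the helper above).
        .filter_map(|event| event.indexed_topics.get(1).copied())
        .collect()
}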
+ Some(bytecode), + "{hash:?}" + ); + } + + let seal_command = create_block_seal_command(l1_batch_number, l2_block); + pool.connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + seal_command.seal(pool.clone()).await.unwrap(); + + let mut conn = pool.connection().await.unwrap(); + let persisted_factory_deps = conn + .factory_deps_dal() + .dump_all_factory_deps_for_tests() + .await; + for (hash, bytecode) in &all_factory_deps { + assert_eq!(persisted_factory_deps.get(hash), Some(bytecode), "{hash:?}"); + } +} + #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCommitmentMode) { @@ -445,15 +542,7 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom tx_filter.gas_per_pubdata, TransactionTimeRangeConstraint::default(), ); - storage - .transactions_dal() - .insert_transaction_l2( - &tx, - TransactionExecutionMetrics::default(), - ValidationTraces::default(), - ) - .await - .unwrap(); + insert_l2_transaction(&mut storage, &tx).await; let previous_batch_hash = mempool .load_batch_state_hash(snapshot_recovery.l1_batch_number) @@ -479,7 +568,6 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom tx.into(), create_execution_result([]), vec![], - HashMap::new(), BlockGasCount::default(), VmExecutionMetrics::default(), vec![], @@ -603,15 +691,7 @@ async fn continue_unsealed_batch_on_restart(commitment_mode: L1BatchCommitmentMo tx_filter.gas_per_pubdata, TransactionTimeRangeConstraint::default(), ); - storage - .transactions_dal() - .insert_transaction_l2( - &tx, - TransactionExecutionMetrics::default(), - ValidationTraces::default(), - ) - .await - .unwrap(); + insert_l2_transaction(&mut storage, &tx).await; let old_l1_batch_params = mempool .wait_for_new_batch_params(&cursor, Duration::from_secs(10)) diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index 60e206038990..fe37ee8d8dd6 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -503,9 +503,8 @@ impl ZkSyncStateKeeper { updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, call_tracer_result, @@ -629,9 +628,8 @@ impl ZkSyncStateKeeper { } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, call_tracer_result, @@ -711,9 +709,8 @@ impl ZkSyncStateKeeper { } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, call_tracer_result, diff --git a/core/node/state_keeper/src/seal_criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/mod.rs index b82d61666fbf..c10b01e7e73d 100644 --- a/core/node/state_keeper/src/seal_criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/mod.rs @@ -278,8 +278,6 @@ impl L2BlockMaxPayloadSizeSealer { #[cfg(test)] mod tests { - use std::collections::HashMap; - use zksync_utils::time::seconds_since_epoch; use super::*; @@ -290,7 +288,6 @@ mod tests { tx, create_execution_result([]), vec![], - HashMap::new(), 
BlockGasCount::default(), VmExecutionMetrics::default(), vec![], diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index ad50c8ca8ce6..b0f641ccbc1a 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -8,8 +8,8 @@ use zksync_dal::{ConnectionPool, Core, CoreDal as _}; use zksync_multivm::interface::{ executor::{BatchExecutor, BatchExecutorFactory}, storage::{InMemoryStorage, StorageView}, - BatchTransactionExecutionResult, ExecutionResult, FinishedL1Batch, L1BatchEnv, L2BlockEnv, - SystemEnv, VmExecutionResultAndLogs, + BatchTransactionExecutionResult, FinishedL1Batch, L1BatchEnv, L2BlockEnv, SystemEnv, + VmExecutionResultAndLogs, }; use zksync_state::OwnedStorage; use zksync_test_account::Account; @@ -28,13 +28,7 @@ pub(super) static BASE_SYSTEM_CONTRACTS: Lazy = /// Creates a `TxExecutionResult` object denoting a successful tx execution. pub(crate) fn successful_exec() -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { - tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - }), + tx_result: Box::new(VmExecutionResultAndLogs::mock_success()), compressed_bytecodes: vec![], call_traces: vec![], } diff --git a/core/node/state_keeper/src/testonly/test_batch_executor.rs b/core/node/state_keeper/src/testonly/test_batch_executor.rs index 5fe05167504c..5625add021bf 100644 --- a/core/node/state_keeper/src/testonly/test_batch_executor.rs +++ b/core/node/state_keeper/src/testonly/test_batch_executor.rs @@ -258,14 +258,11 @@ pub(crate) fn random_upgrade_tx(tx_number: u64) -> ProtocolUpgradeTx { pub(crate) fn successful_exec_with_log() -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, logs: VmExecutionLogs { user_l2_to_l1_logs: vec![UserL2ToL1Log::default()], ..VmExecutionLogs::default() }, - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, + ..VmExecutionResultAndLogs::mock_success() }), compressed_bytecodes: vec![], call_traces: vec![], @@ -275,13 +272,9 @@ pub(crate) fn successful_exec_with_log() -> BatchTransactionExecutionResult { /// Creates a `TxExecutionResult` object denoting a tx that was rejected. 
pub(crate) fn rejected_exec(reason: Halt) -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { - tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Halt { reason }, - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - }), + tx_result: Box::new(VmExecutionResultAndLogs::mock(ExecutionResult::Halt { + reason, + })), compressed_bytecodes: vec![], call_traces: vec![], } diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index 16eed0b2f7f7..28e2f9886b49 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -10,8 +10,8 @@ use tokio::sync::watch; use zksync_config::configs::chain::StateKeeperConfig; use zksync_multivm::{ interface::{ - ExecutionResult, Halt, L1BatchEnv, L2BlockEnv, Refunds, SystemEnv, TxExecutionMode, - VmExecutionLogs, VmExecutionResultAndLogs, VmExecutionStatistics, + Halt, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, VmExecutionLogs, + VmExecutionResultAndLogs, VmExecutionStatistics, }, vm_latest::constants::BATCH_COMPUTATIONAL_GAS_LIMIT, }; @@ -120,26 +120,16 @@ pub(super) fn create_execution_result( let total_log_queries = storage_logs.len() + 2; VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, logs: VmExecutionLogs { - events: vec![], - system_l2_to_l1_logs: vec![], - user_l2_to_l1_logs: vec![], storage_logs, total_log_queries_count: total_log_queries, + ..VmExecutionLogs::default() }, statistics: VmExecutionStatistics { - contracts_used: 0, - cycles_used: 0, - gas_used: 0, - gas_remaining: 0, - computational_gas_used: 0, total_log_queries, - pubdata_published: 0, - circuit_statistic: Default::default(), + ..VmExecutionStatistics::default() }, - refunds: Refunds::default(), - new_known_factory_deps: None, + ..VmExecutionResultAndLogs::mock_success() } } diff --git a/core/node/state_keeper/src/updates/l1_batch_updates.rs b/core/node/state_keeper/src/updates/l1_batch_updates.rs index 2979ebbd8c26..aa2e22cac483 100644 --- a/core/node/state_keeper/src/updates/l1_batch_updates.rs +++ b/core/node/state_keeper/src/updates/l1_batch_updates.rs @@ -49,8 +49,6 @@ impl L1BatchUpdates { #[cfg(test)] mod tests { - use std::collections::HashMap; - use zksync_multivm::vm_latest::TransactionVmExt; use zksync_types::{L2BlockNumber, ProtocolVersionId, H256}; @@ -78,7 +76,6 @@ mod tests { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); diff --git a/core/node/state_keeper/src/updates/l2_block_updates.rs b/core/node/state_keeper/src/updates/l2_block_updates.rs index 27995b384abe..6faa098d40a2 100644 --- a/core/node/state_keeper/src/updates/l2_block_updates.rs +++ b/core/node/state_keeper/src/updates/l2_block_updates.rs @@ -5,7 +5,7 @@ use zksync_multivm::{ Call, CompressedBytecodeInfo, ExecutionResult, L2BlockEnv, TransactionExecutionResult, TxExecutionStatus, VmEvent, VmExecutionMetrics, VmExecutionResultAndLogs, }, - vm_latest::{utils::extract_bytecodes_marked_as_known, TransactionVmExt}, + vm_latest::TransactionVmExt, }; use zksync_types::{ block::{BlockGasCount, L2BlockHasher}, @@ -88,16 +88,10 @@ impl L2BlockUpdates { tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, compressed_bytecodes: Vec, - new_known_factory_deps: HashMap>, call_traces: Vec, ) { let saved_factory_deps = - extract_bytecodes_marked_as_known(&tx_execution_result.logs.events); - 
self.events.extend(tx_execution_result.logs.events); - self.user_l2_to_l1_logs - .extend(tx_execution_result.logs.user_l2_to_l1_logs); - self.system_l2_to_l1_logs - .extend(tx_execution_result.logs.system_l2_to_l1_logs); + VmEvent::extract_bytecodes_marked_as_known(&tx_execution_result.logs.events); let gas_refunded = tx_execution_result.refunds.gas_refunded; let operator_suggested_refund = tx_execution_result.refunds.operator_suggested_refund; @@ -129,10 +123,10 @@ impl L2BlockUpdates { .collect(); // Ensure that *dynamic* factory deps (ones that may be created when executing EVM contracts) // are added into the lookup map as well. - tx_factory_deps.extend(new_known_factory_deps); + tx_factory_deps.extend(tx_execution_result.dynamic_factory_deps); // Save all bytecodes that were marked as known in the bootloader - let known_bytecodes = saved_factory_deps.into_iter().map(|bytecode_hash| { + let known_bytecodes = saved_factory_deps.map(|bytecode_hash| { let bytecode = tx_factory_deps.get(&bytecode_hash).unwrap_or_else(|| { panic!( "Failed to get factory deps on tx: bytecode hash: {:?}, tx hash: {}", @@ -140,7 +134,7 @@ impl L2BlockUpdates { tx.hash() ) }); - (bytecode_hash, bytecode.to_vec()) + (bytecode_hash, bytecode.clone()) }); self.new_factory_deps.extend(known_bytecodes); @@ -149,6 +143,11 @@ impl L2BlockUpdates { self.txs_encoding_size += tx.bootloader_encoding_size(); self.payload_encoding_size += zksync_protobuf::repr::encode::(&tx).len(); + self.events.extend(tx_execution_result.logs.events); + self.user_l2_to_l1_logs + .extend(tx_execution_result.logs.user_l2_to_l1_logs); + self.system_l2_to_l1_logs + .extend(tx_execution_result.logs.system_l2_to_l1_logs); self.storage_logs .extend(tx_execution_result.logs.storage_logs); @@ -211,7 +210,6 @@ mod tests { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); diff --git a/core/node/state_keeper/src/updates/mod.rs b/core/node/state_keeper/src/updates/mod.rs index b1bd35c921ca..752963580e37 100644 --- a/core/node/state_keeper/src/updates/mod.rs +++ b/core/node/state_keeper/src/updates/mod.rs @@ -1,5 +1,3 @@ -use std::collections::HashMap; - use zksync_contracts::BaseSystemContractsHashes; use zksync_multivm::{ interface::{ @@ -10,7 +8,7 @@ use zksync_multivm::{ }; use zksync_types::{ block::BlockGasCount, commitment::PubdataParams, fee_model::BatchFeeInput, Address, - L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, H256, + L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, }; pub(crate) use self::{l1_batch_updates::L1BatchUpdates, l2_block_updates::L2BlockUpdates}; @@ -119,7 +117,6 @@ impl UpdatesManager { tx: Transaction, tx_execution_result: VmExecutionResultAndLogs, compressed_bytecodes: Vec, - new_known_factory_deps: HashMap>, tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, call_traces: Vec, @@ -135,7 +132,6 @@ impl UpdatesManager { tx_l1_gas_this_tx, execution_metrics, compressed_bytecodes, - new_known_factory_deps, call_traces, ); latency.observe(); @@ -246,7 +242,6 @@ mod tests { tx, create_execution_result([]), vec![], - HashMap::new(), new_block_gas_count(), VmExecutionMetrics::default(), vec![], diff --git a/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol b/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol index baa0d37b7530..3a7ee40db228 100644 --- a/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol +++ b/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol @@ -90,6 +90,25 @@ contract MockContractDeployer { 
ACCOUNT_CODE_STORAGE_CONTRACT.storeAccountConstructedCodeHash(newAddress, _salt); return newAddress; } + + bytes32 constant CREATE2_PREFIX = keccak256("zksyncCreate2"); + + /// Mocks `create2` with real counterpart semantics, other than bytecode passed in `_input`. + /// @param _input bytecode to publish + function create2( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input + ) external payable returns (address newAddress) { + KNOWN_CODE_STORAGE_CONTRACT.setEVMBytecodeHash(_bytecodeHash); + KNOWN_CODE_STORAGE_CONTRACT.publishEVMBytecode(_input); + + bytes32 hash = keccak256( + bytes.concat(CREATE2_PREFIX, bytes32(uint256(uint160(msg.sender))), _salt, _bytecodeHash) + ); + newAddress = address(uint160(uint256(hash))); + ACCOUNT_CODE_STORAGE_CONTRACT.storeAccountConstructedCodeHash(newAddress, _bytecodeHash); + } } interface IAccountCodeStorage { @@ -101,6 +120,16 @@ interface IRecursiveContract { function recurse(uint _depth) external returns (uint); } +interface IRecursiveDeployment { + struct EvmDeployment { + bytes32 bytecodeHash; + /// Has fixed length to enable array slicing. + bytes32 bytecode; + } + + function testRecursiveDeployment(EvmDeployment[] calldata _deployments) external; +} + /// Native incrementing library. Not actually a library to simplify deployment. contract IncrementingContract { // Should not collide with other storage slots @@ -154,7 +183,7 @@ uint constant EVM_EMULATOR_STIPEND = 1 << 30; /** * Mock EVM emulator used in low-level tests. */ -contract MockEvmEmulator is IRecursiveContract, IncrementingContract { +contract MockEvmEmulator is IRecursiveContract, IRecursiveDeployment, IncrementingContract { IAccountCodeStorage constant ACCOUNT_CODE_STORAGE_CONTRACT = IAccountCodeStorage(address(0x8002)); /// Set to `true` for testing logic sanity. @@ -210,7 +239,11 @@ contract MockEvmEmulator is IRecursiveContract, IncrementingContract { MockContractDeployer constant CONTRACT_DEPLOYER_CONTRACT = MockContractDeployer(address(0x8006)); /// Emulates EVM contract deployment and a subsequent call to it in a single transaction. 
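// --- Editor's note, not part of the patch: an illustrative sketch only. ---
// The mock `create2` above derives the deployed address as the low 20 bytes of
// keccak256(CREATE2_PREFIX || sender padded to 32 bytes || salt || bytecodeHash), where
// CREATE2_PREFIX = keccak256("zksyncCreate2"). The same derivation in Rust, using the
// `sha3` crate (an assumed dev-dependency, not introduced by this patch), can be handy for
// cross-checking addresses in tests:
use sha3::{Digest, Keccak256};

fn mock_create2_address(sender: [u8; 20], salt: [u8; 32], bytecode_hash: [u8; 32]) -> [u8; 20] {
    let prefix = Keccak256::digest(b"zksyncCreate2");
    // `bytes32(uint256(uint160(msg.sender)))` left-pads the sender with zeros.
    let mut padded_sender = [0u8; 32];
    padded_sender[12..].copy_from_slice(&sender);

    let mut hasher = Keccak256::new();
    hasher.update(prefix.as_slice());
    hasher.update(padded_sender);
    hasher.update(salt);
    hasher.update(bytecode_hash);
    let hash = hasher.finalize();

    // `address(uint160(uint256(hash)))` keeps the low-order 20 bytes of the hash.
    let mut address = [0u8; 20];
    address.copy_from_slice(&hash.as_slice()[12..]);
    address
}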
- function testDeploymentAndCall(bytes32 _evmBytecodeHash, bytes calldata _evmBytecode) external validEvmEntry { + function testDeploymentAndCall( + bytes32 _evmBytecodeHash, + bytes calldata _evmBytecode, + bool _revert + ) external validEvmEntry { IRecursiveContract newContract = IRecursiveContract(CONTRACT_DEPLOYER_CONTRACT.create( _evmBytecodeHash, _evmBytecodeHash, @@ -222,6 +255,69 @@ contract MockEvmEmulator is IRecursiveContract, IncrementingContract { uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; require(newContract.recurse{gas: gasToSend}(5) == 120, "unexpected recursive result"); + require(!_revert, "requested revert"); + } + + function testCallToPreviousDeployment() external validEvmEntry { + IRecursiveContract newContract = IRecursiveContract(address(uint160(address(this)) + 1)); + require(address(newContract).code.length > 0, "contract code length"); + require(address(newContract).codehash != bytes32(0), "contract code hash"); + + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + require(newContract.recurse{gas: gasToSend}(5) == 120, "unexpected recursive result"); + } + + function testRecursiveDeployment(EvmDeployment[] calldata _deployments) external override validEvmEntry { + if (_deployments.length == 0) { + return; + } + + IRecursiveDeployment newContract = IRecursiveDeployment(CONTRACT_DEPLOYER_CONTRACT.create( + _deployments[0].bytecodeHash, + _deployments[0].bytecodeHash, + bytes.concat(_deployments[0].bytecode) + )); + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + newContract.testRecursiveDeployment{gas: gasToSend}(_deployments[1:]); + } + + function testDeploymentWithPartialRevert( + EvmDeployment[] calldata _deployments, + bool[] calldata _shouldRevert + ) external validEvmEntry { + require(_deployments.length == _shouldRevert.length, "length mismatch"); + + for (uint i = 0; i < _deployments.length; i++) { + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + try this.deployThenRevert{gas: gasToSend}( + _deployments[i], + bytes32(i), + _shouldRevert[i] + ) returns(address newAddress) { + require(!_shouldRevert[i], "unexpected deploy success"); + require(newAddress.code.length > 0, "contract code length"); + require(newAddress.codehash != bytes32(0), "contract code hash"); + } catch Error(string memory reason) { + require(_shouldRevert[i], "unexpected revert"); + require(keccak256(bytes(reason)) == keccak256("requested revert"), "unexpected error"); + } + } + } + + function deployThenRevert( + EvmDeployment calldata _deployment, + bytes32 _salt, + bool _shouldRevert + ) external validEvmEntry returns (address newAddress) { + newAddress = CONTRACT_DEPLOYER_CONTRACT.create2( + _salt, + _deployment.bytecodeHash, + bytes.concat(_deployment.bytecode) + ); + require(newAddress.code.length > 0, "contract code length"); + require(newAddress.codehash != bytes32(0), "contract code hash"); + + require(!_shouldRevert, "requested revert"); } fallback() external validEvmEntry { diff --git a/yarn.lock b/yarn.lock index 58511dd1b9ff..15fb8bb7d967 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1424,6 +1424,18 @@ resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity 
sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -2303,6 +2315,11 @@ resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.6.tgz#2a880a24eb19b4f8b25adc2a5095f2aa27f39677" integrity sha512-xSmezSupL+y9VkHZJGDoCBpmnB2ogM13ccaYDWqJTfS3dbuHkgjuwDFUmaFauBCboQMGB/S5UqUl2y54X99BmA== +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + "@pkgr/core@^0.1.0": version "0.1.1" resolved "https://registry.yarnpkg.com/@pkgr/core/-/core-0.1.1.tgz#1ec17e2edbec25c8306d424ecfbf13c7de1aaa31" @@ -2633,6 +2650,16 @@ mkdirp "^2.1.6" path-browserify "^1.0.1" +"@ts-morph/common@~0.23.0": + version "0.23.0" + resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.23.0.tgz#bd4ddbd3f484f29476c8bd985491592ae5fc147e" + integrity sha512-m7Lllj9n/S6sOkCkRftpM7L24uvmfXQFedlW/4hENcuJH1HHm9u5EgxZb9uVjQSCGrbBWBkOGgcTxNg36r6ywA== + dependencies: + fast-glob "^3.3.2" + minimatch "^9.0.3" + mkdirp "^3.0.1" + path-browserify "^1.0.1" + "@tsconfig/node10@^1.0.7": version "1.0.11" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" @@ -3305,6 +3332,11 @@ ansi-regex@^5.0.1: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== +ansi-regex@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== + ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -3324,6 +3356,11 @@ ansi-styles@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + antlr4@^4.11.0: version "4.13.1" resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.13.1.tgz#1e0a1830a08faeb86217cb2e6c34716004e4253d" @@ -4183,6 +4220,11 @@ code-block-writer@^12.0.0: resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-12.0.0.tgz#4dd58946eb4234105aff7f0035977b2afdc2a770" integrity sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w== +code-block-writer@^13.0.1: + version "13.0.3" + resolved 
"https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-13.0.3.tgz#90f8a84763a5012da7af61319dd638655ae90b5b" + integrity sha512-Oofo0pq3IKnsFtuHqSF7TqBfr71aeyZDVJ0HpmqB7FBM2qEigL0iPONSCZSO9pE9dZTAxANe5XHG9Uy0YMv8cg== + collect-v8-coverage@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" @@ -4435,7 +4477,7 @@ cross-spawn@^6.0.5: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -4738,6 +4780,11 @@ dotenv@^8.2.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" @@ -4804,6 +4851,11 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + encoding-down@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" @@ -5774,6 +5826,14 @@ for-each@^0.3.3: dependencies: is-callable "^1.1.3" +foreground-child@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.0.tgz#0ac8644c06e431439f8561db8ecf29a7b5519c77" + integrity sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -6062,6 +6122,18 @@ glob@8.1.0, glob@^8.0.3: minimatch "^5.0.1" once "^1.3.0" +glob@^10.4.1: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" + glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" @@ -6974,6 +7046,15 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" +jackspeak@^3.1.2: + version "3.4.3" + resolved 
"https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jest-changed-files@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" @@ -7877,6 +7958,11 @@ lowercase-keys@^3.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -8175,6 +8261,13 @@ minimatch@^7.4.3: dependencies: brace-expansion "^2.0.1" +minimatch@^9.0.3, minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + minimatch@~3.0.4: version "3.0.8" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" @@ -8187,6 +8280,11 @@ minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8, minimist@~1. 
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + mkdirp-classic@^0.5.2: version "0.5.3" resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" @@ -8209,6 +8307,11 @@ mkdirp@^2.1.6: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.6.tgz#964fbcb12b2d8c5d6fbc62a963ac95a273e2cc19" integrity sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + mnemonist@^0.38.0: version "0.38.5" resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.5.tgz#4adc7f4200491237fe0fa689ac0b86539685cade" @@ -8664,6 +8767,11 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== +package-json-from-dist@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" + integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== + package-json@^8.1.0: version "8.1.1" resolved "https://registry.yarnpkg.com/package-json/-/package-json-8.1.1.tgz#3e9948e43df40d1e8e78a85485f1070bf8f03dc8" @@ -8739,6 +8847,14 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-to-regexp@^6.2.1: version "6.2.2" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.2.tgz#324377a83e5049cbecadc5554d6a63a9a4866b36" @@ -9739,6 +9855,11 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + sinon-chai@^3.7.0: version "3.7.0" resolved "https://registry.yarnpkg.com/sinon-chai/-/sinon-chai-3.7.0.tgz#cfb7dec1c50990ed18c153f1840721cf13139783" 
@@ -10070,6 +10191,15 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^2.1.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" @@ -10087,6 +10217,15 @@ string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2 is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + string.prototype.padend@^3.0.0: version "3.1.6" resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.6.tgz#ba79cf8992609a91c872daa47c6bb144ee7f62a5" @@ -10144,6 +10283,13 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" @@ -10165,6 +10311,13 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" @@ -10520,6 +10673,14 @@ ts-morph@^19.0.0: "@ts-morph/common" "~0.20.0" code-block-writer "^12.0.0" +ts-morph@^22.0.0: + version "22.0.0" + resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-22.0.0.tgz#5532c592fb6dddae08846f12c9ab0fc590b1d42e" + integrity sha512-M9MqFGZREyeb5fTl6gNHKZLqBQA0TjA1lea+CR48R8EBTDuWrNqW6ccC5QvjNR4s6wDumD3LTCjOFSp9iwlzaw== + dependencies: + "@ts-morph/common" "~0.23.0" + code-block-writer "^13.0.1" + ts-node@^10.1.0, ts-node@^10.7.0: version "10.9.2" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" @@ -11000,6 +11161,15 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -11009,6 +11179,15 @@ wrap-ansi@^7.0.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" From 4509179f62ead4b837dfb67760f52de76fac2e37 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Fri, 8 Nov 2024 15:55:59 +0200 Subject: [PATCH 06/23] feat(contract-verifier): Adapt contract verifier API for EVM bytecodes (#3234) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adapts contract verifier APIs to work with EVM bytecodes; adds corresponding request correctness checks. - Brushes up the verifier API server in general. ## Why ❔ Part of the efforts to support EVM bytecode verification. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- Cargo.lock | 12 +- Cargo.toml | 1 + core/bin/contract-verifier/Cargo.toml | 5 +- core/bin/contract-verifier/src/main.rs | 53 +-- core/lib/contract_verifier/src/lib.rs | 112 ++++-- core/lib/contract_verifier/src/resolver.rs | 19 +- core/lib/contract_verifier/src/tests/mod.rs | 45 ++- core/lib/contract_verifier/src/tests/real.rs | 40 +- ...6b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json | 2 +- ...5094de508af93f4085be7cf3b54b1e8ecdadd.json | 2 +- ...make_zk_compiler_version_nullable.down.sql | 2 + ...2_make_zk_compiler_version_nullable.up.sql | 2 + core/lib/dal/src/contract_verification_dal.rs | 86 ++++- .../models/storage_verification_request.rs | 2 +- core/lib/dal/src/storage_logs_dal.rs | 65 +--- core/lib/dal/src/tokens_dal.rs | 2 +- .../types/src/contract_verification_api.rs | 14 +- .../contract_verification_server/Cargo.toml | 10 +- .../src/api_decl.rs | 11 +- .../src/api_impl.rs | 247 +++++++----- .../contract_verification_server/src/cache.rs | 122 ++++++ .../contract_verification_server/src/lib.rs | 9 +- .../contract_verification_server/src/tests.rs | 356 ++++++++++++++++++ .../layers/contract_verification_api.rs | 2 +- 24 files changed, 923 insertions(+), 298 deletions(-) create mode 100644 core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql create mode 100644 core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql create mode 100644 core/node/contract_verification_server/src/cache.rs create mode 100644 core/node/contract_verification_server/src/tests.rs diff --git a/Cargo.lock b/Cargo.lock index 65ae365e3a2b..04a863448d69 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10784,15 +10784,18 @@ version = "0.1.0" dependencies = [ "anyhow", "axum 0.7.7", - "serde", + "http-body-util", "serde_json", + "test-casing", "tokio", + "tower 
0.4.13", "tower-http", "tracing", "vise", - "zksync_config", "zksync_dal", + "zksync_node_test_utils", "zksync_types", + "zksync_utils", ] [[package]] @@ -10800,16 +10803,13 @@ name = "zksync_contract_verifier" version = "0.1.0" dependencies = [ "anyhow", - "ctrlc", - "futures 0.3.31", - "structopt", + "clap 4.5.20", "tokio", "tracing", "zksync_config", "zksync_contract_verifier_lib", "zksync_core_leftovers", "zksync_dal", - "zksync_env_config", "zksync_queued_job_processor", "zksync_utils", "zksync_vlog", diff --git a/Cargo.toml b/Cargo.toml index 87e0de13129f..e491c64605bc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -130,6 +130,7 @@ google-cloud-storage = "0.20.0" governor = "0.4.2" hex = "0.4" http = "1.1" +http-body-util = "0.1.2" httpmock = "0.7.0" hyper = "1.3" insta = "1.29.0" diff --git a/core/bin/contract-verifier/Cargo.toml b/core/bin/contract-verifier/Cargo.toml index f088c2337e71..5e9a9efc6e7e 100644 --- a/core/bin/contract-verifier/Cargo.toml +++ b/core/bin/contract-verifier/Cargo.toml @@ -12,7 +12,6 @@ publish = false [dependencies] zksync_dal.workspace = true -zksync_env_config.workspace = true zksync_config = { workspace = true, features = ["observability_ext"] } zksync_contract_verifier_lib.workspace = true zksync_queued_job_processor.workspace = true @@ -21,8 +20,6 @@ zksync_vlog.workspace = true zksync_core_leftovers.workspace = true anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["full"] } -futures.workspace = true -ctrlc.workspace = true -structopt.workspace = true tracing.workspace = true diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs index 6929f8bfe04d..88f25256c40d 100644 --- a/core/bin/contract-verifier/src/main.rs +++ b/core/bin/contract-verifier/src/main.rs @@ -1,8 +1,7 @@ -use std::{cell::RefCell, time::Duration}; +use std::{path::PathBuf, time::Duration}; -use anyhow::Context; -use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt}; -use structopt::StructOpt; +use anyhow::Context as _; +use clap::Parser; use tokio::sync::watch; use zksync_config::configs::PrometheusConfig; use zksync_contract_verifier_lib::ContractVerifier; @@ -12,27 +11,31 @@ use zksync_queued_job_processor::JobProcessor; use zksync_utils::wait_for_tasks::ManagedTasks; use zksync_vlog::prometheus::PrometheusExporterConfig; -#[derive(StructOpt)] -#[structopt(name = "ZKsync contract code verifier", author = "Matter Labs")] +#[derive(Debug, Parser)] +#[command(name = "ZKsync contract code verifier", author = "Matter Labs")] struct Opt { /// Number of jobs to process. If None, runs indefinitely. - #[structopt(long)] + #[arg(long)] jobs_number: Option, /// Path to the configuration file. - #[structopt(long)] - config_path: Option, + #[arg(long)] + config_path: Option, /// Path to the secrets file. 
- #[structopt(long)] - secrets_path: Option, + #[arg(long)] + secrets_path: Option, } #[tokio::main] async fn main() -> anyhow::Result<()> { - let opt = Opt::from_args(); + let opt = Opt::parse(); let general_config = load_general_config(opt.config_path).context("general config")?; - let database_secrets = load_database_secrets(opt.secrets_path).context("database secrets")?; + let observability_config = general_config + .observability + .context("ObservabilityConfig")?; + let _observability_guard = observability_config.install()?; + let database_secrets = load_database_secrets(opt.secrets_path).context("database secrets")?; let verifier_config = general_config .contract_verifier .context("ContractVerifierConfig")?; @@ -46,33 +49,13 @@ async fn main() -> anyhow::Result<()> { .context("Master DB URL is absent")?, ) .build() - .await - .unwrap(); - - let observability_config = general_config - .observability - .context("ObservabilityConfig")?; - - let _observability_guard = observability_config.install()?; + .await?; let (stop_sender, stop_receiver) = watch::channel(false); - let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); - { - let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); - ctrlc::set_handler(move || { - let mut sender = stop_signal_sender.borrow_mut(); - block_on(sender.send(true)).expect("Ctrl+C signal send"); - }) - .expect("Error setting Ctrl+C handler"); - } - let contract_verifier = ContractVerifier::new(verifier_config.compilation_timeout(), pool) .await .context("failed initializing contract verifier")?; let tasks = vec![ - // TODO PLA-335: Leftovers after the prover DB split. - // The prover connection pool is not used by the contract verifier, but we need to pass it - // since `JobProcessor` trait requires it. tokio::spawn(contract_verifier.run(stop_receiver.clone(), opt.jobs_number)), tokio::spawn( PrometheusExporterConfig::pull(prometheus_config.listener_port).run(stop_receiver), @@ -82,7 +65,7 @@ async fn main() -> anyhow::Result<()> { let mut tasks = ManagedTasks::new(tasks); tokio::select! { () = tasks.wait_single() => {}, - _ = stop_signal_receiver.next() => { + _ = tokio::signal::ctrl_c() => { tracing::info!("Stop signal received, shutting down"); }, }; diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 425440fa2eb6..686bb0d7bdc3 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -14,7 +14,7 @@ use zksync_dal::{contract_verification_dal::DeployedContractData, ConnectionPool use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ contract_verification_api::{ - CompilationArtifacts, CompilerType, VerificationIncomingRequest, VerificationInfo, + self as api, CompilationArtifacts, VerificationIncomingRequest, VerificationInfo, VerificationRequest, }, Address, CONTRACT_DEPLOYER_ADDRESS, @@ -35,6 +35,65 @@ mod resolver; #[cfg(test)] mod tests; +#[derive(Debug)] +struct ZkCompilerVersions { + /// Version of the base / non-ZK compiler. + pub base: String, + /// Version of the ZK compiler. + pub zk: String, +} + +/// Internal counterpart of `ContractVersions` from API that encompasses all supported compilation modes. 
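// --- Editor's note, not part of the patch: an illustrative sketch only. ---
// The `main.rs` changes above swap `structopt` plus a `ctrlc` handler wired through a
// futures channel for `clap`'s derive API and `tokio::signal::ctrl_c()` polled inside
// `tokio::select!`. Stripped down to a skeleton (argument name as in the patch, everything
// else hypothetical), the shutdown pattern looks like this:
use clap::Parser;

#[derive(Debug, Parser)]
struct Opt {
    /// Number of jobs to process. If `None`, runs indefinitely.
    #[arg(long)]
    jobs_number: Option<usize>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let _opt = Opt::parse();
    let worker = tokio::spawn(async {
        // The long-running work (e.g. the contract verifier job loop) would go here.
    });
    tokio::select! {
        _ = worker => {}
        _ = tokio::signal::ctrl_c() => {
            println!("Stop signal received, shutting down");
        }
    }
    Ok(())
}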
+#[derive(Debug)] +enum VersionedCompiler { + Solc(String), + #[allow(dead_code)] // TODO (EVM-864): add vyper support + Vyper(String), + ZkSolc(ZkCompilerVersions), + ZkVyper(ZkCompilerVersions), +} + +impl From for VersionedCompiler { + fn from(versions: api::CompilerVersions) -> Self { + match versions { + api::CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version: None, + } => Self::Solc(compiler_solc_version), + + api::CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version: Some(zk), + } => Self::ZkSolc(ZkCompilerVersions { + base: compiler_solc_version, + zk, + }), + + api::CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version: None, + } => Self::Vyper(compiler_vyper_version), + + api::CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version: Some(zk), + } => Self::ZkVyper(ZkCompilerVersions { + base: compiler_vyper_version, + zk, + }), + } + } +} + +impl VersionedCompiler { + fn expected_bytecode_kind(&self) -> BytecodeMarker { + match self { + Self::Solc(_) | Self::Vyper(_) => BytecodeMarker::Evm, + Self::ZkSolc(_) | Self::ZkVyper(_) => BytecodeMarker::EraVm, + } + } +} + enum ConstructorArgs { Check(Vec), Ignore, @@ -112,19 +171,19 @@ impl ContractVerifier { let mut transaction = storage.start_transaction().await?; transaction .contract_verification_dal() - .set_zksolc_versions(supported_versions.zksolc) + .set_zksolc_versions(&supported_versions.zksolc) .await?; transaction .contract_verification_dal() - .set_solc_versions(supported_versions.solc) + .set_solc_versions(&supported_versions.solc) .await?; transaction .contract_verification_dal() - .set_zkvyper_versions(supported_versions.zkvyper) + .set_zkvyper_versions(&supported_versions.zkvyper) .await?; transaction .contract_verification_dal() - .set_vyper_versions(supported_versions.vyper) + .set_vyper_versions(&supported_versions.vyper) .await?; transaction.commit().await?; Ok(()) @@ -214,13 +273,11 @@ impl ContractVerifier { async fn compile_zksolc( &self, + version: &ZkCompilerVersions, req: VerificationIncomingRequest, ) -> Result { - let zksolc = self - .compiler_resolver - .resolve_zksolc(&req.compiler_versions) - .await?; - tracing::debug!(?zksolc, ?req.compiler_versions, "resolved compiler"); + let zksolc = self.compiler_resolver.resolve_zksolc(version).await?; + tracing::debug!(?zksolc, ?version, "resolved compiler"); let input = ZkSolc::build_input(req)?; time::timeout(self.compilation_timeout, zksolc.compile(input)) @@ -230,13 +287,11 @@ impl ContractVerifier { async fn compile_zkvyper( &self, + version: &ZkCompilerVersions, req: VerificationIncomingRequest, ) -> Result { - let zkvyper = self - .compiler_resolver - .resolve_zkvyper(&req.compiler_versions) - .await?; - tracing::debug!(?zkvyper, ?req.compiler_versions, "resolved compiler"); + let zkvyper = self.compiler_resolver.resolve_zkvyper(version).await?; + tracing::debug!(?zkvyper, ?version, "resolved compiler"); let input = ZkVyper::build_input(req)?; time::timeout(self.compilation_timeout, zkvyper.compile(input)) .await @@ -245,12 +300,10 @@ impl ContractVerifier { async fn compile_solc( &self, + version: &str, req: VerificationIncomingRequest, ) -> Result { - let solc = self - .compiler_resolver - .resolve_solc(req.compiler_versions.compiler_version()) - .await?; + let solc = self.compiler_resolver.resolve_solc(version).await?; tracing::debug!(?solc, ?req.compiler_versions, "resolved compiler"); let input = Solc::build_input(req)?; @@ -276,15 +329,24 @@ impl 
ContractVerifier { return Err(err.into()); } - match (bytecode_marker, compiler_type) { - (BytecodeMarker::EraVm, CompilerType::Solc) => self.compile_zksolc(req).await, - (BytecodeMarker::EraVm, CompilerType::Vyper) => self.compile_zkvyper(req).await, - (BytecodeMarker::Evm, CompilerType::Solc) => self.compile_solc(req).await, - (BytecodeMarker::Evm, CompilerType::Vyper) => { - // TODO: add vyper support + let compiler = VersionedCompiler::from(req.compiler_versions.clone()); + if compiler.expected_bytecode_kind() != bytecode_marker { + let err = anyhow::anyhow!( + "bytecode kind expected by compiler {compiler:?} differs from the actual bytecode kind \ + of the verified contract ({bytecode_marker:?})", + ); + return Err(err.into()); + } + + match &compiler { + VersionedCompiler::Solc(version) => self.compile_solc(version, req).await, + VersionedCompiler::Vyper(_) => { + // TODO (EVM-864): add vyper support let err = anyhow::anyhow!("vyper toolchain is not yet supported for EVM contracts"); return Err(err.into()); } + VersionedCompiler::ZkSolc(version) => self.compile_zksolc(version, req).await, + VersionedCompiler::ZkVyper(version) => self.compile_zkvyper(version, req).await, } } diff --git a/core/lib/contract_verifier/src/resolver.rs b/core/lib/contract_verifier/src/resolver.rs index 347db8fff094..34a70b759797 100644 --- a/core/lib/contract_verifier/src/resolver.rs +++ b/core/lib/contract_verifier/src/resolver.rs @@ -6,12 +6,13 @@ use std::{ use anyhow::Context as _; use tokio::fs; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{CompilationArtifacts, CompilerVersions}; +use zksync_types::contract_verification_api::CompilationArtifacts; use zksync_utils::env::Workspace; use crate::{ compilers::{Solc, SolcInput, ZkSolc, ZkSolcInput, ZkVyper, ZkVyperInput}, error::ContractVerifierError, + ZkCompilerVersions, }; #[derive(Debug, Clone, Copy)] @@ -111,13 +112,13 @@ pub(crate) trait CompilerResolver: fmt::Debug + Send + Sync { /// Resolves a `zksolc` compiler. async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError>; /// Resolves a `zkvyper` compiler. 
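// --- Editor's note, not part of the patch: an illustrative sketch only. ---
// With the API change in this patch, `compiler_zksolc_version` / `compiler_zkvyper_version`
// become optional: `None` selects the plain solc/vyper toolchain producing EVM bytecode,
// while `Some(..)` selects zksolc/zkvyper producing EraVM bytecode. The internal
// `VersionedCompiler` mirrors this, and `compile()` rejects requests whose expected
// bytecode kind does not match the deployed contract. A hypothetical helper mirroring
// `VersionedCompiler::expected_bytecode_kind()` on the API type, assuming the enum layout
// shown above:
use zksync_types::contract_verification_api::CompilerVersions;

fn expected_toolchain(versions: &CompilerVersions) -> &'static str {
    match versions {
        CompilerVersions::Solc { compiler_zksolc_version: Some(_), .. } => "zksolc (EraVM bytecode)",
        CompilerVersions::Solc { compiler_zksolc_version: None, .. } => "solc (EVM bytecode)",
        CompilerVersions::Vyper { compiler_zkvyper_version: Some(_), .. } => "zkvyper (EraVM bytecode)",
        CompilerVersions::Vyper { compiler_zkvyper_version: None, .. } => "vyper (EVM bytecode)",
    }
}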
async fn resolve_zkvyper( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError>; } @@ -198,14 +199,14 @@ impl CompilerResolver for EnvCompilerResolver { async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { - let zksolc_version = versions.zk_compiler_version(); + let zksolc_version = &version.zk; let zksolc_path = CompilerType::ZkSolc .bin_path(&self.home_dir, zksolc_version) .await?; let solc_path = CompilerType::Solc - .bin_path(&self.home_dir, versions.compiler_version()) + .bin_path(&self.home_dir, &version.base) .await?; let compiler_paths = CompilerPaths { base: solc_path, @@ -219,13 +220,13 @@ impl CompilerResolver for EnvCompilerResolver { async fn resolve_zkvyper( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { let zkvyper_path = CompilerType::ZkVyper - .bin_path(&self.home_dir, versions.zk_compiler_version()) + .bin_path(&self.home_dir, &version.zk) .await?; let vyper_path = CompilerType::Vyper - .bin_path(&self.home_dir, versions.compiler_version()) + .bin_path(&self.home_dir, &version.base) .await?; let compiler_paths = CompilerPaths { base: vyper_path, diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index f05d3155a6d4..7caa5f32c991 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -280,18 +280,18 @@ impl CompilerResolver for MockCompilerResolver { async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { - if versions.compiler_version() != SOLC_VERSION { + if version.base != SOLC_VERSION { return Err(ContractVerifierError::UnknownCompilerVersion( "solc", - versions.compiler_version().to_owned(), + version.base.clone(), )); } - if versions.zk_compiler_version() != ZKSOLC_VERSION { + if version.zk != ZKSOLC_VERSION { return Err(ContractVerifierError::UnknownCompilerVersion( "zksolc", - versions.zk_compiler_version().to_owned(), + version.zk.clone(), )); } Ok(Box::new(self.clone())) @@ -299,7 +299,7 @@ impl CompilerResolver for MockCompilerResolver { async fn resolve_zkvyper( &self, - _versions: &CompilerVersions, + _version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { unreachable!("not tested") } @@ -311,7 +311,7 @@ fn test_request(address: Address, source: &str) -> VerificationIncomingRequest { source_code_data: SourceCodeData::SolSingleFile(source.into()), contract_name: "Counter".to_owned(), compiler_versions: CompilerVersions::Solc { - compiler_zksolc_version: ZKSOLC_VERSION.to_owned(), + compiler_zksolc_version: Some(ZKSOLC_VERSION.to_owned()), compiler_solc_version: SOLC_VERSION.to_owned(), }, optimization_used: true, @@ -375,7 +375,7 @@ async fn contract_verifier_basics(contract: TestContract) { req.constructor_arguments = ethabi::encode(contract.constructor_args()).into(); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -468,10 +468,14 @@ async fn verifying_evm_bytecode(contract: TestContract) { ) .await; let mut req = test_request(address, contract.source()); + req.compiler_versions = CompilerVersions::Solc { + compiler_solc_version: SOLC_VERSION.to_owned(), + compiler_zksolc_version: None, + }; req.constructor_arguments = 
ethabi::encode(contract.constructor_args()).into(); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -513,7 +517,7 @@ async fn bytecode_mismatch_error() { let req = test_request(address, COUNTER_CONTRACT); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -578,6 +582,13 @@ async fn args_mismatch_error(contract: TestContract, bytecode_kind: BytecodeMark } let mut req = test_request(address, contract.source()); + if matches!(bytecode_kind, BytecodeMarker::Evm) { + req.compiler_versions = CompilerVersions::Solc { + compiler_zksolc_version: None, + compiler_solc_version: SOLC_VERSION.to_owned(), + }; + } + // Intentionally encode incorrect constructor args req.constructor_arguments = match contract { TestContract::Counter => ethabi::encode(&[Token::Bool(true)]).into(), @@ -585,7 +596,7 @@ async fn args_mismatch_error(contract: TestContract, bytecode_kind: BytecodeMark }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -648,10 +659,14 @@ async fn creation_bytecode_mismatch() { &[], ) .await; - let req = test_request(address, COUNTER_CONTRACT); + let mut req = test_request(address, COUNTER_CONTRACT); + req.compiler_versions = CompilerVersions::Solc { + compiler_zksolc_version: None, + compiler_solc_version: SOLC_VERSION.to_owned(), + }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -696,14 +711,14 @@ async fn no_compiler_version() { mock_deployment(&mut storage, address, vec![0xff; 32], &[]).await; let req = VerificationIncomingRequest { compiler_versions: CompilerVersions::Solc { - compiler_zksolc_version: ZKSOLC_VERSION.to_owned(), + compiler_zksolc_version: Some(ZKSOLC_VERSION.to_owned()), compiler_solc_version: "1.0.0".to_owned(), // a man can dream }, ..test_request(address, COUNTER_CONTRACT) }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); diff --git a/core/lib/contract_verifier/src/tests/real.rs b/core/lib/contract_verifier/src/tests/real.rs index 5f550a5feea8..a7113044b405 100644 --- a/core/lib/contract_verifier/src/tests/real.rs +++ b/core/lib/contract_verifier/src/tests/real.rs @@ -8,7 +8,7 @@ use zksync_utils::bytecode::validate_bytecode; use super::*; -#[derive(Debug)] +#[derive(Debug, Clone)] struct TestCompilerVersions { solc: String, zksolc: String, @@ -26,10 +26,20 @@ impl TestCompilerVersions { }) } - fn for_zksolc(self) -> CompilerVersions { + fn zksolc(self) -> ZkCompilerVersions { + ZkCompilerVersions { + base: self.solc, + zk: self.zksolc, + } + } + + fn solc_for_api(self, bytecode_kind: BytecodeMarker) -> CompilerVersions { CompilerVersions::Solc { compiler_solc_version: self.solc, - compiler_zksolc_version: self.zksolc, + compiler_zksolc_version: match bytecode_kind { + BytecodeMarker::Evm => None, + BytecodeMarker::EraVm => Some(self.zksolc), + }, } } } @@ -70,10 +80,12 @@ macro_rules! 
real_resolver { async fn using_real_compiler() { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = supported_compilers.for_zksolc(); - let compiler = compiler_resolver.resolve_zksolc(&versions).await.unwrap(); + let compiler = compiler_resolver + .resolve_zksolc(&supported_compilers.clone().zksolc()) + .await + .unwrap(); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; let input = ZkSolc::build_input(req).unwrap(); @@ -92,7 +104,7 @@ async fn using_standalone_solc() { let req = VerificationIncomingRequest { compiler_versions: CompilerVersions::Solc { compiler_solc_version: version.clone(), - compiler_zksolc_version: "1000.0.0".to_owned(), // not used + compiler_zksolc_version: None, }, ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; @@ -108,23 +120,22 @@ async fn using_standalone_solc() { async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = supported_compilers.for_zksolc(); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; let address = Address::repeat_byte(1); let output = match bytecode_kind { BytecodeMarker::EraVm => { let compiler = compiler_resolver - .resolve_zksolc(&req.compiler_versions) + .resolve_zksolc(&supported_compilers.zksolc()) .await .unwrap(); let input = ZkSolc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() } BytecodeMarker::Evm => { - let solc_version = req.compiler_versions.compiler_version(); + let solc_version = &supported_compilers.solc; let compiler = compiler_resolver.resolve_solc(solc_version).await.unwrap(); let input = Solc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() @@ -151,7 +162,7 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { } let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -174,10 +185,9 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { async fn compilation_errors(bytecode_kind: BytecodeMarker) { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = supported_compilers.for_zksolc(); let address = Address::repeat_byte(1); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.solc_for_api(bytecode_kind), source_code_data: SourceCodeData::SolSingleFile("contract ???".to_owned()), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; @@ -196,7 +206,7 @@ async fn compilation_errors(bytecode_kind: BytecodeMarker) { let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); diff --git a/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json b/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json index 1d515edba819..0db6ba6f51b6 100644 --- a/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json +++ 
b/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json @@ -69,7 +69,7 @@ false, false, false, - false, + true, false, false, true, diff --git a/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json b/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json index ebe8ce232cfb..ac7989a5be77 100644 --- a/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json +++ b/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json @@ -67,7 +67,7 @@ false, false, false, - false, + true, false, false, true, diff --git a/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql new file mode 100644 index 000000000000..2693a565fd02 --- /dev/null +++ b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ALTER COLUMN zk_compiler_version SET NOT NULL; diff --git a/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql new file mode 100644 index 000000000000..92a689956f55 --- /dev/null +++ b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ALTER COLUMN zk_compiler_version DROP NOT NULL; diff --git a/core/lib/dal/src/contract_verification_dal.rs b/core/lib/dal/src/contract_verification_dal.rs index 1a827545ca13..93a4ce2fd35a 100644 --- a/core/lib/dal/src/contract_verification_dal.rs +++ b/core/lib/dal/src/contract_verification_dal.rs @@ -76,7 +76,7 @@ impl ContractVerificationDal<'_, '_> { pub async fn add_contract_verification_request( &mut self, - query: VerificationIncomingRequest, + query: &VerificationIncomingRequest, ) -> DalResult { sqlx::query!( r#" @@ -104,12 +104,12 @@ impl ContractVerificationDal<'_, '_> { query.contract_address.as_bytes(), // Serialization should always succeed. 
serde_json::to_string(&query.source_code_data).unwrap(), - query.contract_name, + &query.contract_name, query.compiler_versions.zk_compiler_version(), query.compiler_versions.compiler_version(), query.optimization_used, - query.optimizer_mode, - query.constructor_arguments.0, + query.optimizer_mode.as_deref(), + query.constructor_arguments.0.as_slice(), query.is_system, query.force_evmla, ) @@ -441,7 +441,7 @@ impl ContractVerificationDal<'_, '_> { async fn set_compiler_versions( &mut self, compiler: Compiler, - versions: Vec, + versions: &[String], ) -> DalResult<()> { let mut transaction = self.storage.start_transaction().await?; let compiler = format!("{compiler}"); @@ -472,7 +472,7 @@ impl ContractVerificationDal<'_, '_> { UNNEST($1::TEXT []) AS u (version) ON CONFLICT (version, compiler) DO NOTHING "#, - &versions, + versions, &compiler, ) .instrument("set_compiler_versions#insert") @@ -484,20 +484,20 @@ impl ContractVerificationDal<'_, '_> { transaction.commit().await } - pub async fn set_zksolc_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_zksolc_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::ZkSolc, versions).await } - pub async fn set_solc_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_solc_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::Solc, versions).await } - pub async fn set_zkvyper_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_zkvyper_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::ZkVyper, versions) .await } - pub async fn set_vyper_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_vyper_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::Vyper, versions).await } @@ -567,7 +567,9 @@ mod tests { use std::collections::HashMap; use zksync_types::{ - tx::IncludedTxLocation, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, + contract_verification_api::{CompilerVersions, SourceCodeData}, + tx::IncludedTxLocation, + Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, }; use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics}; @@ -645,4 +647,66 @@ mod tests { assert_eq!(contract.contract_address, Some(CONTRACT_DEPLOYER_ADDRESS)); assert_eq!(contract.calldata.unwrap(), tx.execute.calldata); } + + async fn test_working_with_verification_requests(zksolc: Option<&str>) { + let request = VerificationIncomingRequest { + contract_address: Address::repeat_byte(11), + source_code_data: SourceCodeData::SolSingleFile("contract Test {}".to_owned()), + contract_name: "Test".to_string(), + compiler_versions: CompilerVersions::Solc { + compiler_zksolc_version: zksolc.map(str::to_owned), + compiler_solc_version: "0.8.27".to_owned(), + }, + optimization_used: true, + optimizer_mode: Some("z".to_owned()), + constructor_arguments: web3::Bytes(b"test".to_vec()), + is_system: false, + force_evmla: true, + }; + + let pool = ConnectionPool::::test_pool().await; + let mut conn = pool.connection().await.unwrap(); + let id = conn + .contract_verification_dal() + .add_contract_verification_request(&request) + .await + .unwrap(); + + let status = conn + .contract_verification_dal() + .get_verification_request_status(id) + .await + .unwrap() + .expect("request not persisted"); + assert_eq!(status.status, "queued"); + + let req = conn + 
.contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap() + .expect("request not queued"); + assert_eq!(req.id, id); + assert_eq!(req.req.contract_address, request.contract_address); + assert_eq!(req.req.contract_name, request.contract_name); + assert_eq!(req.req.compiler_versions, request.compiler_versions); + assert_eq!(req.req.optimization_used, request.optimization_used); + assert_eq!(req.req.optimizer_mode, request.optimizer_mode); + assert_eq!(req.req.constructor_arguments, request.constructor_arguments); + assert_eq!(req.req.is_system, request.is_system); + assert_eq!(req.req.force_evmla, request.force_evmla); + + let maybe_req = conn + .contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap(); + assert!(maybe_req.is_none()); + } + + #[tokio::test] + async fn working_with_verification_requests() { + test_working_with_verification_requests(None).await; + test_working_with_verification_requests(Some("1.5.7")).await; + } } diff --git a/core/lib/dal/src/models/storage_verification_request.rs b/core/lib/dal/src/models/storage_verification_request.rs index 61895fab76d3..ae4718e41290 100644 --- a/core/lib/dal/src/models/storage_verification_request.rs +++ b/core/lib/dal/src/models/storage_verification_request.rs @@ -12,7 +12,7 @@ pub struct StorageVerificationRequest { pub contract_address: Vec, pub source_code: String, pub contract_name: String, - pub zk_compiler_version: String, + pub zk_compiler_version: Option, pub compiler_version: String, pub optimization_used: bool, pub optimizer_mode: Option, diff --git a/core/lib/dal/src/storage_logs_dal.rs b/core/lib/dal/src/storage_logs_dal.rs index ced8f594add3..1675d76643c2 100644 --- a/core/lib/dal/src/storage_logs_dal.rs +++ b/core/lib/dal/src/storage_logs_dal.rs @@ -225,60 +225,13 @@ impl StorageLogsDal<'_, '_> { Ok(()) } - pub async fn is_contract_deployed_at_address(&mut self, address: Address) -> bool { - let hashed_key = get_code_key(&address).hashed_key(); - let row = sqlx::query!( - r#" - SELECT - COUNT(*) AS "count!" - FROM - ( - SELECT - * - FROM - storage_logs - WHERE - hashed_key = $1 - AND miniblock_number <= COALESCE( - ( - SELECT - MAX(number) - FROM - miniblocks - ), - ( - SELECT - miniblock_number - FROM - snapshot_recovery - ) - ) - ORDER BY - miniblock_number DESC, - operation_number DESC - LIMIT - 1 - ) sl - WHERE - sl.value != $2 - "#, - hashed_key.as_bytes(), - FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), - ) - .fetch_one(self.storage.conn()) - .await - .unwrap(); - - row.count > 0 - } - /// Returns addresses and the corresponding deployment L2 block numbers among the specified contract /// `addresses`. `at_l2_block` allows filtering deployment by L2 blocks. 
pub async fn filter_deployed_contracts( &mut self, addresses: impl Iterator, at_l2_block: Option, - ) -> DalResult> { + ) -> DalResult> { let (bytecode_hashed_keys, address_by_hashed_key): (Vec<_>, HashMap<_, _>) = addresses .map(|address| { let hashed_key = get_code_key(&address).hashed_key().0; @@ -330,12 +283,13 @@ impl StorageLogsDal<'_, '_> { .await?; let deployment_data = rows.into_iter().filter_map(|row| { - if row.value == FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() { + let bytecode_hash = H256::from_slice(&row.value); + if bytecode_hash == FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH { return None; } let l2_block_number = L2BlockNumber(row.miniblock_number as u32); let address = address_by_hashed_key[row.hashed_key.as_slice()]; - Some((address, l2_block_number)) + Some((address, (l2_block_number, bytecode_hash))) }); Ok(deployment_data.collect()) } @@ -1168,8 +1122,9 @@ mod tests { async fn filtering_deployed_contracts() { let contract_address = Address::repeat_byte(1); let other_contract_address = Address::repeat_byte(23); + let bytecode_hash = H256::repeat_byte(0xff); let successful_deployment = - StorageLog::new_write_log(get_code_key(&contract_address), H256::repeat_byte(0xff)); + StorageLog::new_write_log(get_code_key(&contract_address), bytecode_hash); let failed_deployment = StorageLog::new_write_log( get_code_key(&contract_address), FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, @@ -1233,7 +1188,7 @@ mod tests { .unwrap(); assert_eq!( deployed_map, - HashMap::from([(contract_address, L2BlockNumber(2))]) + HashMap::from([(contract_address, (L2BlockNumber(2), bytecode_hash))]) ); } @@ -1268,7 +1223,7 @@ mod tests { .unwrap(); assert_eq!( deployed_map, - HashMap::from([(contract_address, L2BlockNumber(2))]) + HashMap::from([(contract_address, (L2BlockNumber(2), bytecode_hash))]) ); for new_l2_block in [None, Some(L2BlockNumber(3))] { @@ -1283,8 +1238,8 @@ mod tests { assert_eq!( deployed_map, HashMap::from([ - (contract_address, L2BlockNumber(2)), - (other_contract_address, L2BlockNumber(3)), + (contract_address, (L2BlockNumber(2), bytecode_hash)), + (other_contract_address, (L2BlockNumber(3), bytecode_hash)), ]) ); } diff --git a/core/lib/dal/src/tokens_dal.rs b/core/lib/dal/src/tokens_dal.rs index 218e152fa82a..b5fd67fc63c8 100644 --- a/core/lib/dal/src/tokens_dal.rs +++ b/core/lib/dal/src/tokens_dal.rs @@ -98,7 +98,7 @@ impl TokensDal<'_, '_> { .filter_map(|address| { if address.is_zero() { None - } else if let Some(deployed_at) = token_deployment_data.get(&address) { + } else if let Some((deployed_at, _)) = token_deployment_data.get(&address) { (deployed_at > &block_number).then_some(address.0) } else { // Token belongs to a "pending" L2 block that's not yet fully inserted to the database. diff --git a/core/lib/types/src/contract_verification_api.rs b/core/lib/types/src/contract_verification_api.rs index fcaa1aa9a535..21e511549beb 100644 --- a/core/lib/types/src/contract_verification_api.rs +++ b/core/lib/types/src/contract_verification_api.rs @@ -152,17 +152,19 @@ pub enum CompilerType { Vyper, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(untagged)] pub enum CompilerVersions { #[serde(rename_all = "camelCase")] Solc { - compiler_zksolc_version: String, // FIXME: optional? 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + compiler_zksolc_version: Option, compiler_solc_version: String, }, #[serde(rename_all = "camelCase")] Vyper { - compiler_zkvyper_version: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + compiler_zkvyper_version: Option, compiler_vyper_version: String, }, } @@ -175,16 +177,16 @@ impl CompilerVersions { } } - pub fn zk_compiler_version(&self) -> &str { + pub fn zk_compiler_version(&self) -> Option<&str> { match self { Self::Solc { compiler_zksolc_version, .. - } => compiler_zksolc_version, + } => compiler_zksolc_version.as_deref(), Self::Vyper { compiler_zkvyper_version, .. - } => compiler_zkvyper_version, + } => compiler_zkvyper_version.as_deref(), } } diff --git a/core/node/contract_verification_server/Cargo.toml b/core/node/contract_verification_server/Cargo.toml index eeb2c7828467..038347debc64 100644 --- a/core/node/contract_verification_server/Cargo.toml +++ b/core/node/contract_verification_server/Cargo.toml @@ -11,9 +11,9 @@ keywords.workspace = true categories.workspace = true [dependencies] -zksync_config.workspace = true zksync_dal.workspace = true zksync_types.workspace = true +zksync_utils.workspace = true vise.workspace = true anyhow.workspace = true @@ -21,5 +21,11 @@ axum.workspace = true tokio = { workspace = true, features = ["time"] } tower-http = { workspace = true, features = ["cors"] } tracing.workspace = true -serde.workspace = true + +[dev-dependencies] +zksync_node_test_utils.workspace = true + +http-body-util.workspace = true serde_json.workspace = true +test-casing.workspace = true +tower.workspace = true diff --git a/core/node/contract_verification_server/src/api_decl.rs b/core/node/contract_verification_server/src/api_decl.rs index 256062936d32..d451cd79add9 100644 --- a/core/node/contract_verification_server/src/api_decl.rs +++ b/core/node/contract_verification_server/src/api_decl.rs @@ -3,10 +3,13 @@ use std::sync::Arc; use tower_http::cors::CorsLayer; use zksync_dal::{ConnectionPool, Core}; +use crate::cache::SupportedCompilersCache; + #[derive(Debug, Clone)] -pub struct RestApi { - pub(super) master_connection_pool: ConnectionPool, - pub(super) replica_connection_pool: ConnectionPool, +pub(crate) struct RestApi { + pub(crate) master_connection_pool: ConnectionPool, + pub(crate) replica_connection_pool: ConnectionPool, + pub(crate) supported_compilers: Arc, } impl RestApi { @@ -14,7 +17,9 @@ impl RestApi { master_connection_pool: ConnectionPool, replica_connection_pool: ConnectionPool, ) -> Self { + let supported_compilers = SupportedCompilersCache::new(replica_connection_pool.clone()); Self { + supported_compilers: Arc::new(supported_compilers), master_connection_pool, replica_connection_pool, } diff --git a/core/node/contract_verification_server/src/api_impl.rs b/core/node/contract_verification_server/src/api_impl.rs index b8111e98a1cc..94be65673bad 100644 --- a/core/node/contract_verification_server/src/api_impl.rs +++ b/core/node/contract_verification_server/src/api_impl.rs @@ -1,195 +1,234 @@ -use std::sync::Arc; +use std::{collections::HashSet, iter, sync::Arc}; +use anyhow::Context as _; use axum::{ extract::{Path, State}, - response::Response, + http::StatusCode, + response::{IntoResponse, Response}, Json, }; -use serde::Serialize; -use zksync_dal::CoreDal; -use zksync_types::{contract_verification_api::VerificationIncomingRequest, Address}; +use zksync_dal::{CoreDal, DalError}; +use zksync_types::{ + contract_verification_api::{ + CompilerVersions, 
VerificationIncomingRequest, VerificationInfo, VerificationRequestStatus, + }, + Address, +}; +use zksync_utils::bytecode::BytecodeMarker; use super::{api_decl::RestApi, metrics::METRICS}; -fn ok_json(data: impl Serialize) -> Response { - Response::builder() - .status(axum::http::StatusCode::OK) - .body(serde_json::to_string(&data).expect("Failed to serialize")) - .unwrap() +#[derive(Debug)] +pub(crate) enum ApiError { + IncorrectCompilerVersions, + UnsupportedCompilerVersions, + MissingZkCompilerVersion, + BogusZkCompilerVersion, + NoDeployedContract, + RequestNotFound, + VerificationInfoNotFound, + Internal(anyhow::Error), +} + +impl From for ApiError { + fn from(err: anyhow::Error) -> Self { + Self::Internal(err) + } +} + +impl From for ApiError { + fn from(err: DalError) -> Self { + Self::Internal(err.generalize()) + } } -fn bad_request(message: &str) -> Response { - Response::builder() - .status(axum::http::StatusCode::BAD_REQUEST) - .body(message.to_string()) - .unwrap() +impl ApiError { + pub fn message(&self) -> &'static str { + match self { + Self::IncorrectCompilerVersions => "incorrect compiler versions", + Self::UnsupportedCompilerVersions => "unsupported compiler versions", + Self::MissingZkCompilerVersion => "missing zk compiler version for EraVM bytecode", + Self::BogusZkCompilerVersion => "zk compiler version specified for EVM bytecode", + Self::NoDeployedContract => "There is no deployed contract on this address", + Self::RequestNotFound => "request not found", + Self::VerificationInfoNotFound => "verification info not found for address", + Self::Internal(_) => "internal server error", + } + } } -fn not_found() -> Response { - Response::builder() - .status(axum::http::StatusCode::NOT_FOUND) - .body(String::new()) - .unwrap() +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + let status_code = match &self { + Self::IncorrectCompilerVersions + | Self::UnsupportedCompilerVersions + | Self::MissingZkCompilerVersion + | Self::BogusZkCompilerVersion + | Self::NoDeployedContract => StatusCode::BAD_REQUEST, + + Self::RequestNotFound | Self::VerificationInfoNotFound => StatusCode::NOT_FOUND, + + Self::Internal(err) => { + // Do not expose the error details to the client, but log it. + tracing::warn!("Internal error: {err:#}"); + StatusCode::INTERNAL_SERVER_ERROR + } + }; + (status_code, self.message()).into_response() + } } +type ApiResult = Result, ApiError>; + impl RestApi { #[tracing::instrument(skip(query))] fn validate_contract_verification_query( query: &VerificationIncomingRequest, - ) -> Result<(), Response> { + ) -> Result<(), ApiError> { if query.source_code_data.compiler_type() != query.compiler_versions.compiler_type() { - return Err(bad_request("incorrect compiler versions")); + return Err(ApiError::IncorrectCompilerVersions); } - Ok(()) } + fn validate_compilers( + versions: &CompilerVersions, + bytecode_kind: BytecodeMarker, + ) -> Result<(), ApiError> { + match bytecode_kind { + BytecodeMarker::EraVm if versions.zk_compiler_version().is_none() => { + Err(ApiError::MissingZkCompilerVersion) + } + BytecodeMarker::Evm if versions.zk_compiler_version().is_some() => { + Err(ApiError::BogusZkCompilerVersion) + } + _ => Ok(()), + } + } + /// Add a contract verification job to the queue if the requested contract wasn't previously verified. + // FIXME: this doesn't seem to check that the contract isn't verified; should it? 
#[tracing::instrument(skip(self_, request))] pub async fn verification( State(self_): State>, Json(request): Json, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification"].start(); - if let Err(res) = Self::validate_contract_verification_query(&request) { - return res; + Self::validate_contract_verification_query(&request)?; + + let is_compilation_supported = self_ + .supported_compilers + .get(|supported| supported.contain(&request.compiler_versions)) + .await?; + if !is_compilation_supported { + return Err(ApiError::UnsupportedCompilerVersions); } + let mut storage = self_ .master_connection_pool .connection_tagged("api") - .await - .unwrap(); - - if !storage + .await?; + let deployment_info = storage .storage_logs_dal() - .is_contract_deployed_at_address(request.contract_address) - .await - { - return bad_request("There is no deployed contract on this address"); - } + .filter_deployed_contracts(iter::once(request.contract_address), None) + .await?; + let &(_, bytecode_hash) = deployment_info + .get(&request.contract_address) + .ok_or(ApiError::NoDeployedContract)?; + let bytecode_marker = BytecodeMarker::new(bytecode_hash).with_context(|| { + format!( + "unknown bytecode marker for bytecode hash {bytecode_hash:?} at address {:?}", + request.contract_address + ) + })?; + Self::validate_compilers(&request.compiler_versions, bytecode_marker)?; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(request) - .await - .unwrap(); - + .add_contract_verification_request(&request) + .await?; method_latency.observe(); - ok_json(request_id) + Ok(Json(request_id)) } #[tracing::instrument(skip(self_))] pub async fn verification_request_status( State(self_): State>, id: Path, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification_request_status"].start(); let status = self_ .replica_connection_pool .connection_tagged("api") - .await - .unwrap() + .await? .contract_verification_dal() .get_verification_request_status(*id) - .await - .unwrap(); + .await? 
+ .ok_or(ApiError::RequestNotFound)?; method_latency.observe(); - match status { - Some(status) => ok_json(status), - None => not_found(), - } + Ok(Json(status)) } #[tracing::instrument(skip(self_))] - pub async fn zksolc_versions(State(self_): State>) -> Response { + pub async fn zksolc_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_zksolc_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_zksolc_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.zksolc.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn solc_versions(State(self_): State>) -> Response { + pub async fn solc_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_solc_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_solc_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.solc.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn zkvyper_versions(State(self_): State>) -> Response { + pub async fn zkvyper_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_zkvyper_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_zkvyper_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.zkvyper.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn vyper_versions(State(self_): State>) -> Response { + pub async fn vyper_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_vyper_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_vyper_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.vyper.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] pub async fn verification_info( State(self_): State>, address: Path
, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification_info"].start(); - let info = self_ .replica_connection_pool .connection_tagged("api") - .await - .unwrap() + .await? .contract_verification_dal() .get_contract_verification_info(*address) - .await - .unwrap(); - + .await? + .ok_or(ApiError::VerificationInfoNotFound)?; method_latency.observe(); - match info { - Some(info) => ok_json(info), - None => not_found(), - } + Ok(Json(info)) } } diff --git a/core/node/contract_verification_server/src/cache.rs b/core/node/contract_verification_server/src/cache.rs new file mode 100644 index 000000000000..c8e367515287 --- /dev/null +++ b/core/node/contract_verification_server/src/cache.rs @@ -0,0 +1,122 @@ +use std::{ + collections::HashSet, + time::{Duration, Instant}, +}; + +use tokio::sync::RwLock; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; +use zksync_types::contract_verification_api::CompilerVersions; + +/// Compiler versions supported by the contract verifier. +#[derive(Debug, Clone)] +pub(crate) struct SupportedCompilerVersions { + pub solc: HashSet, + pub zksolc: HashSet, + pub vyper: HashSet, + pub zkvyper: HashSet, +} + +impl SupportedCompilerVersions { + /// Checks whether the supported compilers include ones specified in a request. + pub fn contain(&self, versions: &CompilerVersions) -> bool { + match versions { + CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version, + } => { + self.solc.contains(compiler_solc_version) + && compiler_zksolc_version + .as_ref() + .map_or(true, |ver| self.zksolc.contains(ver)) + } + CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version, + } => { + self.vyper.contains(compiler_vyper_version) + && compiler_zkvyper_version + .as_ref() + .map_or(true, |ver| self.zkvyper.contains(ver)) + } + } + } +} + +impl SupportedCompilerVersions { + async fn new(connection: &mut Connection<'_, Core>) -> Result { + let solc = connection + .contract_verification_dal() + .get_solc_versions() + .await?; + let zksolc = connection + .contract_verification_dal() + .get_zksolc_versions() + .await?; + let vyper = connection + .contract_verification_dal() + .get_vyper_versions() + .await?; + let zkvyper = connection + .contract_verification_dal() + .get_zkvyper_versions() + .await?; + Ok(Self { + solc: solc.into_iter().collect(), + zksolc: zksolc.into_iter().collect(), + vyper: vyper.into_iter().collect(), + zkvyper: zkvyper.into_iter().collect(), + }) + } +} + +/// Cache for compiler versions supported by the contract verifier. +#[derive(Debug)] +pub(crate) struct SupportedCompilersCache { + connection_pool: ConnectionPool, + inner: RwLock>, +} + +impl SupportedCompilersCache { + const CACHE_UPDATE_INTERVAL: Duration = Duration::from_secs(10); + + pub fn new(connection_pool: ConnectionPool) -> Self { + Self { + connection_pool, + inner: RwLock::new(None), + } + } + + fn get_cached( + cache: Option<&(SupportedCompilerVersions, Instant)>, + action: impl FnOnce(&SupportedCompilerVersions) -> R, + ) -> Option { + cache.and_then(|(versions, updated_at)| { + (updated_at.elapsed() <= Self::CACHE_UPDATE_INTERVAL).then(|| action(versions)) + }) + } + + pub async fn get( + &self, + action: impl Fn(&SupportedCompilerVersions) -> R, + ) -> Result { + let output = Self::get_cached(self.inner.read().await.as_ref(), &action); + if let Some(output) = output { + return Ok(output); + } + + // We don't want to hold an exclusive lock while querying Postgres. 
+ let supported = { + let mut connection = self.connection_pool.connection_tagged("api").await?; + let mut db_transaction = connection + .transaction_builder()? + .set_readonly() + .build() + .await?; + SupportedCompilerVersions::new(&mut db_transaction).await? + }; + let output = action(&supported); + // Another task may have written to the cache already, but we should be fine with updating it again. + *self.inner.write().await = Some((supported, Instant::now())); + Ok(output) + } +} diff --git a/core/node/contract_verification_server/src/lib.rs b/core/node/contract_verification_server/src/lib.rs index eea45f8564bf..912cec55f0b8 100644 --- a/core/node/contract_verification_server/src/lib.rs +++ b/core/node/contract_verification_server/src/lib.rs @@ -1,21 +1,24 @@ +use std::net::SocketAddr; + use anyhow::Context as _; use tokio::sync::watch; -use zksync_config::ContractVerifierConfig; use zksync_dal::ConnectionPool; use self::api_decl::RestApi; mod api_decl; mod api_impl; +mod cache; mod metrics; +#[cfg(test)] +mod tests; pub async fn start_server( master_connection_pool: ConnectionPool, replica_connection_pool: ConnectionPool, - config: ContractVerifierConfig, + bind_address: SocketAddr, mut stop_receiver: watch::Receiver, ) -> anyhow::Result<()> { - let bind_address = config.bind_addr(); let api = RestApi::new(master_connection_pool, replica_connection_pool).into_router(); let listener = tokio::net::TcpListener::bind(bind_address) diff --git a/core/node/contract_verification_server/src/tests.rs b/core/node/contract_verification_server/src/tests.rs new file mode 100644 index 000000000000..b7b0d3e8efb4 --- /dev/null +++ b/core/node/contract_verification_server/src/tests.rs @@ -0,0 +1,356 @@ +//! Tests for contract verification API server. + +use std::{str, time::Duration}; + +use axum::{ + body::Body, + http::{header, Method, Request, Response, StatusCode}, +}; +use http_body_util::BodyExt as _; +use test_casing::test_casing; +use tower::ServiceExt; +use zksync_dal::{Connection, Core, CoreDal}; +use zksync_node_test_utils::create_l2_block; +use zksync_types::{ + contract_verification_api::CompilerVersions, get_code_key, Address, L2BlockNumber, + ProtocolVersion, StorageLog, +}; +use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode, BytecodeMarker}; + +use super::*; +use crate::api_impl::ApiError; + +const SOLC_VERSION: &str = "0.8.27"; +const ZKSOLC_VERSION: &str = "1.5.6"; + +async fn prepare_storage(storage: &mut Connection<'_, Core>) { + storage + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + storage + .blocks_dal() + .insert_l2_block(&create_l2_block(0)) + .await + .unwrap(); + + storage + .contract_verification_dal() + .set_solc_versions(&[SOLC_VERSION.to_owned()]) + .await + .unwrap(); + storage + .contract_verification_dal() + .set_zksolc_versions(&[ZKSOLC_VERSION.to_owned()]) + .await + .unwrap(); +} + +async fn mock_deploy_contract( + storage: &mut Connection<'_, Core>, + address: Address, + kind: BytecodeMarker, +) { + let bytecode_hash = match kind { + BytecodeMarker::EraVm => hash_bytecode(&[0; 32]), + BytecodeMarker::Evm => hash_evm_bytecode(&[0; 96]), + }; + let deploy_log = StorageLog::new_write_log(get_code_key(&address), bytecode_hash); + storage + .storage_logs_dal() + .append_storage_logs(L2BlockNumber(0), &[deploy_log]) + .await + .unwrap() +} + +fn post_request(body: &serde_json::Value) -> Request { + Request::builder() + .method(Method::POST) + .uri("/contract_verification") + 
.header(header::CONTENT_TYPE, "application/json") + .body(Body::from(serde_json::to_vec(body).unwrap())) + .unwrap() +} + +async fn json_response(response: Response) -> serde_json::Value { + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get(header::CONTENT_TYPE).unwrap(), + "application/json" + ); + let response = response.into_body(); + let response = response.collect().await.unwrap().to_bytes(); + serde_json::from_slice(&response).unwrap() +} + +#[tokio::test] +async fn getting_compiler_versions() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let router = RestApi::new(pool.clone(), pool).into_router(); + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/zksolc_versions") + .body(Body::empty()) + .unwrap(); + let response = router.clone().oneshot(req).await.unwrap(); + let versions = json_response(response).await; + assert_eq!(versions, serde_json::json!([ZKSOLC_VERSION])); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/solc_versions") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + let versions = json_response(response).await; + assert_eq!(versions, serde_json::json!([SOLC_VERSION])); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION), + BytecodeMarker::Evm => None, + }, + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .clone() + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); // the address is not deployed to + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::NoDeployedContract.message()); + + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let response = router + .clone() + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + let id = json_response(response).await; + assert_eq!(id, serde_json::json!(1)); + + let request = storage + .contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap() + .expect("request not persisted"); + assert_eq!(request.id, 1); + assert_eq!(request.req.contract_address, address); + assert_eq!( + request.req.compiler_versions, + CompilerVersions::Solc { + compiler_zksolc_version: match bytecode_kind { + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION.to_owned()), + BytecodeMarker::Evm => None, + }, + compiler_solc_version: SOLC_VERSION.to_owned(), + } + ); + assert_eq!(request.req.contract_name, "Test"); + assert!(request.req.optimization_used); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/1") + .body(Body::empty()) + .unwrap(); + let response = 
router.oneshot(req).await.unwrap(); + let request_status = json_response(response).await; + assert_eq!(request_status["status"], "in_progress"); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request_with_invalid_compiler_type(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + // Intentionally incorrect versions "shape" + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::Evm => Some(ZKSOLC_VERSION), + BytecodeMarker::EraVm => None, + }, + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + let expected_message = match bytecode_kind { + BytecodeMarker::Evm => ApiError::BogusZkCompilerVersion.message(), + BytecodeMarker::EraVm => ApiError::MissingZkCompilerVersion.message(), + }; + assert_eq!(error_message, expected_message); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request_with_unsupported_solc(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::Evm => None, + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION), + }, + "compilerSolcVersion": "1.0.0", + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!( + error_message, + ApiError::UnsupportedCompilerVersions.message() + ); +} + +#[tokio::test] +async fn submitting_request_with_unsupported_zksolc() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, BytecodeMarker::EraVm).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": "1000.0.0", + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), 
StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!( + error_message, + ApiError::UnsupportedCompilerVersions.message() + ); +} + +#[tokio::test] +async fn querying_missing_request() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let router = RestApi::new(pool.clone(), pool).into_router(); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/1") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::RequestNotFound.message()); +} + +#[tokio::test] +async fn querying_missing_verification_info() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let router = RestApi::new(pool.clone(), pool).into_router(); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/info/0x2323232323232323232323232323232323232323") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::VerificationInfoNotFound.message()); +} + +#[tokio::test] +async fn mismatched_compiler_type() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, BytecodeMarker::EraVm).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerVyperVersion": "1.0.1", + "optimizationUsed": true, + }); + + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::IncorrectCompilerVersions.message()); +} diff --git a/core/node/node_framework/src/implementations/layers/contract_verification_api.rs b/core/node/node_framework/src/implementations/layers/contract_verification_api.rs index 3f1f76cc1c12..2ca7cc25a1fd 100644 --- a/core/node/node_framework/src/implementations/layers/contract_verification_api.rs +++ b/core/node/node_framework/src/implementations/layers/contract_verification_api.rs @@ -69,7 +69,7 @@ impl Task for ContractVerificationApiTask { zksync_contract_verification_server::start_server( self.master_pool, self.replica_pool, - self.config, + self.config.bind_addr(), stop_receiver.0, ) .await From 1bfff0e007e2fb5a4b4b885cf5c69a5cd290888b Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Fri, 8 Nov 2024 22:07:51 +0100 Subject: [PATCH 07/23] fix(prover): Remove unneeded dependencies, add default for graceful_shutdown_timeout (#3242) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Remove unneeded dependencies. Add default for graceful_shutdown_timeout. ## Why ❔ ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. ref ZKD-1855 --- prover/Cargo.lock | 2 -- prover/crates/bin/prover_autoscaler/Cargo.toml | 2 -- prover/crates/bin/prover_autoscaler/src/config.rs | 5 ++++- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 0a86a44f145d..f119d4bd1951 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -8395,9 +8395,7 @@ dependencies = [ "url", "vise", "zksync_config", - "zksync_core_leftovers", "zksync_prover_job_monitor", - "zksync_types", "zksync_utils", "zksync_vlog", ] diff --git a/prover/crates/bin/prover_autoscaler/Cargo.toml b/prover/crates/bin/prover_autoscaler/Cargo.toml index 88569aa87e94..4e66ecc2b0e3 100644 --- a/prover/crates/bin/prover_autoscaler/Cargo.toml +++ b/prover/crates/bin/prover_autoscaler/Cargo.toml @@ -10,10 +10,8 @@ keywords.workspace = true categories.workspace = true [dependencies] -zksync_core_leftovers.workspace = true zksync_vlog.workspace = true zksync_utils.workspace = true -zksync_types.workspace = true zksync_config = { workspace = true, features = ["observability_ext"] } zksync_prover_job_monitor.workspace = true diff --git a/prover/crates/bin/prover_autoscaler/src/config.rs b/prover/crates/bin/prover_autoscaler/src/config.rs index 6729a5372d56..777ffe89fc91 100644 --- a/prover/crates/bin/prover_autoscaler/src/config.rs +++ b/prover/crates/bin/prover_autoscaler/src/config.rs @@ -11,7 +11,10 @@ use zksync_config::configs::ObservabilityConfig; #[derive(Debug, Clone, PartialEq, Deserialize)] pub struct ProverAutoscalerConfig { /// Amount of time ProverJobMonitor will wait all it's tasks to finish. - #[serde(with = "humantime_serde")] + #[serde( + with = "humantime_serde", + default = "ProverAutoscalerConfig::default_graceful_shutdown_timeout" + )] pub graceful_shutdown_timeout: Duration, pub agent_config: Option, pub scaler_config: Option, From fd913fe6c5e4a4a3b6edc9ac9df8fe0232cbe409 Mon Sep 17 00:00:00 2001 From: perekopskiy <53865202+perekopskiy@users.noreply.github.com> Date: Mon, 11 Nov 2024 10:24:55 +0200 Subject: [PATCH 08/23] chore: ignore derivative advisory (#3247) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Ignores RUSTSEC advisory for `derivative` crate being unmaintained ## Why ❔ ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
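For context on the `graceful_shutdown_timeout` change in PATCH 07 above: the new `#[serde(default = "...")]` attribute combines `humantime_serde` parsing with a fallback value when the key is missing from the config. Below is a minimal, self-contained sketch of that behavior; the struct name, the 5-second default, and the use of `serde_json` are illustrative assumptions (the real default lives in `ProverAutoscalerConfig`), and the snippet assumes the `serde`, `serde_json`, and `humantime-serde` crates.

```rust
use std::time::Duration;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ShutdownConfig {
    // Same attribute shape as in the patch: human-readable durations ("30s", "10m"),
    // falling back to a default when the key is omitted.
    #[serde(
        with = "humantime_serde",
        default = "ShutdownConfig::default_graceful_shutdown_timeout"
    )]
    graceful_shutdown_timeout: Duration,
}

impl ShutdownConfig {
    fn default_graceful_shutdown_timeout() -> Duration {
        Duration::from_secs(5) // placeholder value, not the repository's actual default
    }
}

fn main() {
    // Key present: humantime_serde parses the "30s" string into a Duration.
    let cfg: ShutdownConfig =
        serde_json::from_str(r#"{"graceful_shutdown_timeout": "30s"}"#).unwrap();
    assert_eq!(cfg.graceful_shutdown_timeout, Duration::from_secs(30));

    // Key omitted: deserialization now succeeds and uses the default instead of erroring,
    // which is the point of adding `default = "..."` in the patch.
    let cfg: ShutdownConfig = serde_json::from_str("{}").unwrap();
    assert_eq!(cfg.graceful_shutdown_timeout, Duration::from_secs(5));
}
```

The `default` and `with` attributes are independent, so the human-readable parsing is unchanged for configs that already set the field.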
--- deny.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/deny.toml b/deny.toml index dc5a32c2c070..7e2070de65c6 100644 --- a/deny.toml +++ b/deny.toml @@ -17,6 +17,7 @@ ignore = [ "RUSTSEC-2021-0145", "RUSTSEC-2021-0139", "RUSTSEC-2024-0375", + "RUSTSEC-2024-0388", # `derivative` is unmaintained, crypto dependenicies (boojum, circuit_encodings and others) rely on it ] [licenses] From 6ea36d14940a19f638512556ccc4c5127150b5c9 Mon Sep 17 00:00:00 2001 From: Daniyar Itegulov Date: Mon, 11 Nov 2024 17:30:33 +0700 Subject: [PATCH 09/23] feat(api-server): add `yParity` for non-legacy txs (#3246) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Closes #3245 ## Why ❔ See #3245 for more details. TLDR better compatibility with ETH spec. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- core/lib/dal/src/models/storage_transaction.rs | 8 ++++++++ core/lib/types/src/api/mod.rs | 3 +++ core/lib/types/src/l2/mod.rs | 8 ++++++++ 3 files changed, 19 insertions(+) diff --git a/core/lib/dal/src/models/storage_transaction.rs b/core/lib/dal/src/models/storage_transaction.rs index 459a3ec0c0fb..b55126800cce 100644 --- a/core/lib/dal/src/models/storage_transaction.rs +++ b/core/lib/dal/src/models/storage_transaction.rs @@ -541,6 +541,13 @@ impl StorageApiTransaction { .or_else(|| self.max_fee_per_gas.clone()) .unwrap_or_else(BigDecimal::zero), }; + // Legacy transactions are not supposed to have `yParity` and are reliant on `v` instead. + // Other transactions are required to have `yParity` which replaces the deprecated `v` value + // (still included for backwards compatibility). + let y_parity = match self.tx_format { + None | Some(0) => None, + _ => signature.as_ref().map(|s| U64::from(s.v())), + }; let mut tx = api::Transaction { hash: H256::from_slice(&self.tx_hash), nonce: U256::from(self.nonce.unwrap_or(0) as u64), @@ -553,6 +560,7 @@ impl StorageApiTransaction { gas_price: Some(bigdecimal_to_u256(gas_price)), gas: bigdecimal_to_u256(self.gas_limit.unwrap_or_else(BigDecimal::zero)), input: serde_json::from_value(self.calldata).expect("incorrect calldata in Postgres"), + y_parity, v: signature.as_ref().map(|s| U64::from(s.v())), r: signature.as_ref().map(|s| U256::from(s.r())), s: signature.as_ref().map(|s| U256::from(s.s())), diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index 409dc3727570..5f81e889b537 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -515,6 +515,9 @@ pub struct Transaction { pub gas: U256, /// Input data pub input: Bytes, + /// The parity (0 for even, 1 for odd) of the y-value of the secp256k1 signature + #[serde(rename = "yParity", default, skip_serializing_if = "Option::is_none")] + pub y_parity: Option, /// ECDSA recovery id #[serde(default, skip_serializing_if = "Option::is_none")] pub v: Option, diff --git a/core/lib/types/src/l2/mod.rs b/core/lib/types/src/l2/mod.rs index 48e813e571d2..e7d582ab17a1 100644 --- a/core/lib/types/src/l2/mod.rs +++ b/core/lib/types/src/l2/mod.rs @@ -396,6 +396,13 @@ impl From for api::Transaction { } else { (None, None, None) }; + // Legacy transactions are not supposed to have `yParity` and are reliant on `v` instead. 
+ // Other transactions are required to have `yParity` which replaces the deprecated `v` value + // (still included for backwards compatibility). + let y_parity = match tx.common_data.transaction_type { + TransactionType::LegacyTransaction => None, + _ => v, + }; Self { hash: tx.hash(), @@ -409,6 +416,7 @@ impl From for api::Transaction { max_fee_per_gas: Some(tx.common_data.fee.max_fee_per_gas), gas: tx.common_data.fee.gas_limit, input: Bytes(tx.execute.calldata), + y_parity, v, r, s, From cce765b3b232f0382a7e921fdc4d34a7413cde9f Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Mon, 11 Nov 2024 13:10:59 +0200 Subject: [PATCH 10/23] refactor(utils): Recombobulate utils crate (#3237) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Moves some of functionality of the `utils` crate to `basic_types`, such as conversions, `div_ceil_u256`, and `serde_wrappers`. - Inlines time-related utils at call sites (mostly the state keeper). ## Why ❔ - `utils` has moderately heavyweight deps (`tokio`, `reqwest`), which are not necessary for basic functionality. - Basic functionality better fits in `basic_types` by domain. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- Cargo.lock | 23 +-- core/bin/external_node/Cargo.toml | 1 - core/bin/genesis_generator/Cargo.toml | 1 - .../system-constants-generator/src/utils.rs | 25 ++- core/bin/zksync_server/Cargo.toml | 1 - core/lib/basic_types/src/conversions.rs | 36 ++++ core/lib/basic_types/src/lib.rs | 11 ++ .../src/serde_wrappers.rs | 6 +- core/lib/basic_types/src/web3/mod.rs | 8 + core/lib/constants/Cargo.toml | 1 - core/lib/constants/src/trusted_slots.rs | 3 +- core/lib/contract_verifier/src/tests/mod.rs | 6 +- core/lib/contracts/Cargo.toml | 3 + core/lib/contracts/src/lib.rs | 19 +- core/lib/contracts/src/serde_bytecode.rs | 112 +++++++++++ core/lib/crypto_primitives/Cargo.toml | 1 - .../src/packed_eth_signature.rs | 3 +- core/lib/dal/src/blocks_web3_dal.rs | 3 +- core/lib/dal/src/consensus/conv.rs | 6 +- core/lib/dal/src/contract_verification_dal.rs | 2 +- core/lib/dal/src/factory_deps_dal.rs | 19 +- core/lib/dal/src/models/mod.rs | 27 ++- .../lib/dal/src/models/storage_transaction.rs | 6 +- core/lib/dal/src/models/tests.rs | 49 ++++- core/lib/dal/src/storage_web3_dal.rs | 3 +- core/lib/dal/src/transactions_dal.rs | 6 +- core/lib/merkle_tree/Cargo.toml | 1 - .../src/glue/types/vm/block_context_mode.rs | 2 +- .../multivm/src/glue/types/vm/storage_log.rs | 3 +- .../multivm/src/glue/types/zk_evm_1_3_1.rs | 2 +- .../multivm/src/glue/types/zk_evm_1_3_3.rs | 6 +- .../multivm/src/glue/types/zk_evm_1_4_0.rs | 2 +- .../multivm/src/glue/types/zk_evm_1_4_1.rs | 6 +- .../multivm/src/glue/types/zk_evm_1_5_0.rs | 2 +- .../lib/multivm/src/pubdata_builders/tests.rs | 5 +- .../src/tracers/prestate_tracer/mod.rs | 5 +- core/lib/multivm/src/tracers/validator/mod.rs | 5 +- .../src/tracers/validator/vm_1_4_1/mod.rs | 9 +- .../src/tracers/validator/vm_1_4_2/mod.rs | 9 +- .../validator/vm_boojum_integration/mod.rs | 9 +- .../src/tracers/validator/vm_latest/mod.rs | 10 +- .../validator/vm_refunds_enhancement/mod.rs | 9 +- .../validator/vm_virtual_blocks/mod.rs | 9 +- core/lib/multivm/src/utils/bytecode.rs | 43 +++- core/lib/multivm/src/utils/deduplicator.rs | 6 +- 
core/lib/multivm/src/utils/events.rs | 3 +- .../src/versions/testonly/block_tip.rs | 4 +- .../src/versions/testonly/code_oracle.rs | 5 +- .../src/versions/testonly/default_aa.rs | 3 +- .../src/versions/testonly/evm_emulator.rs | 9 +- .../versions/testonly/get_used_contracts.rs | 4 +- .../src/versions/testonly/l1_tx_execution.rs | 5 +- .../src/versions/testonly/l2_blocks.rs | 10 +- core/lib/multivm/src/versions/testonly/mod.rs | 10 +- .../src/versions/testonly/secp256r1.rs | 3 +- .../multivm/src/versions/testonly/transfer.rs | 3 +- .../multivm/src/versions/testonly/upgrade.rs | 8 +- .../multivm/src/versions/vm_1_3_2/events.rs | 7 +- .../src/versions/vm_1_3_2/history_recorder.rs | 3 +- .../versions/vm_1_3_2/oracles/decommitter.rs | 7 +- .../src/versions/vm_1_3_2/oracles/storage.rs | 5 +- .../versions/vm_1_3_2/oracles/tracer/utils.rs | 3 +- .../vm_1_3_2/oracles/tracer/validation.rs | 13 +- .../multivm/src/versions/vm_1_3_2/refunds.rs | 3 +- .../src/versions/vm_1_3_2/test_utils.rs | 8 +- .../src/versions/vm_1_3_2/transaction_data.rs | 20 +- .../multivm/src/versions/vm_1_3_2/utils.rs | 3 +- core/lib/multivm/src/versions/vm_1_3_2/vm.rs | 4 +- .../versions/vm_1_3_2/vm_with_bootloader.rs | 30 +-- .../vm_1_4_1/bootloader_state/l2_block.rs | 5 +- .../vm_1_4_1/bootloader_state/utils.rs | 6 +- .../vm_1_4_1/implementation/bytecode.rs | 6 +- .../src/versions/vm_1_4_1/old_vm/events.rs | 7 +- .../vm_1_4_1/old_vm/history_recorder.rs | 3 +- .../vm_1_4_1/old_vm/oracles/decommitter.rs | 8 +- .../src/versions/vm_1_4_1/oracles/storage.rs | 2 +- .../vm_1_4_1/tracers/pubdata_tracer.rs | 23 ++- .../src/versions/vm_1_4_1/tracers/refunds.rs | 6 +- .../src/versions/vm_1_4_1/tracers/utils.rs | 3 +- .../vm_1_4_1/types/internals/pubdata.rs | 2 +- .../types/internals/transaction_data.rs | 20 +- .../vm_1_4_1/types/internals/vm_state.rs | 16 +- .../src/versions/vm_1_4_1/types/l1_batch.rs | 3 +- .../src/versions/vm_1_4_1/utils/fee.rs | 10 +- .../src/versions/vm_1_4_1/utils/l2_blocks.rs | 4 +- .../vm_1_4_2/bootloader_state/l2_block.rs | 5 +- .../vm_1_4_2/bootloader_state/utils.rs | 6 +- .../vm_1_4_2/implementation/bytecode.rs | 8 +- .../src/versions/vm_1_4_2/old_vm/events.rs | 7 +- .../vm_1_4_2/old_vm/history_recorder.rs | 3 +- .../vm_1_4_2/old_vm/oracles/decommitter.rs | 8 +- .../src/versions/vm_1_4_2/oracles/storage.rs | 2 +- .../vm_1_4_2/tracers/pubdata_tracer.rs | 23 ++- .../src/versions/vm_1_4_2/tracers/refunds.rs | 6 +- .../src/versions/vm_1_4_2/tracers/utils.rs | 3 +- .../vm_1_4_2/types/internals/pubdata.rs | 2 +- .../types/internals/transaction_data.rs | 20 +- .../vm_1_4_2/types/internals/vm_state.rs | 16 +- .../src/versions/vm_1_4_2/types/l1_batch.rs | 3 +- .../src/versions/vm_1_4_2/utils/fee.rs | 10 +- .../src/versions/vm_1_4_2/utils/l2_blocks.rs | 4 +- .../bootloader_state/l2_block.rs | 5 +- .../bootloader_state/utils.rs | 6 +- .../implementation/bytecode.rs | 8 +- .../vm_boojum_integration/old_vm/events.rs | 7 +- .../old_vm/history_recorder.rs | 3 +- .../old_vm/oracles/decommitter.rs | 7 +- .../vm_boojum_integration/oracles/storage.rs | 2 +- .../tracers/pubdata_tracer.rs | 23 ++- .../vm_boojum_integration/tracers/refunds.rs | 4 +- .../vm_boojum_integration/tracers/utils.rs | 3 +- .../types/internals/pubdata.rs | 2 +- .../types/internals/transaction_data.rs | 20 +- .../types/internals/vm_state.rs | 16 +- .../vm_boojum_integration/types/l1_batch.rs | 3 +- .../vm_boojum_integration/utils/fee.rs | 10 +- .../vm_boojum_integration/utils/l2_blocks.rs | 4 +- .../vm_boojum_integration/utils/overhead.rs | 3 +- 
.../vm_fast/bootloader_state/l2_block.rs | 5 +- .../vm_fast/bootloader_state/utils.rs | 6 +- .../multivm/src/versions/vm_fast/bytecode.rs | 4 +- .../multivm/src/versions/vm_fast/events.rs | 5 +- .../src/versions/vm_fast/evm_deploy_tracer.rs | 4 +- core/lib/multivm/src/versions/vm_fast/glue.rs | 6 +- .../vm_fast/initial_bootloader_memory.rs | 3 +- .../multivm/src/versions/vm_fast/pubdata.rs | 2 +- .../multivm/src/versions/vm_fast/refund.rs | 3 +- .../multivm/src/versions/vm_fast/tests/mod.rs | 5 +- .../src/versions/vm_fast/transaction_data.rs | 20 +- core/lib/multivm/src/versions/vm_fast/vm.rs | 6 +- .../vm_latest/bootloader_state/l2_block.rs | 5 +- .../vm_latest/bootloader_state/utils.rs | 6 +- .../vm_latest/implementation/bytecode.rs | 8 +- .../src/versions/vm_latest/old_vm/events.rs | 7 +- .../vm_latest/old_vm/history_recorder.rs | 3 +- .../vm_latest/old_vm/oracles/decommitter.rs | 6 +- .../src/versions/vm_latest/oracles/storage.rs | 5 +- .../src/versions/vm_latest/tests/mod.rs | 9 +- .../vm_latest/tracers/evm_deploy_tracer.rs | 13 +- .../vm_latest/tracers/pubdata_tracer.rs | 23 ++- .../src/versions/vm_latest/tracers/refunds.rs | 3 +- .../src/versions/vm_latest/tracers/utils.rs | 3 +- .../types/internals/transaction_data.rs | 20 +- .../vm_latest/types/internals/vm_state.rs | 21 +- .../src/versions/vm_latest/types/l1_batch.rs | 3 +- .../src/versions/vm_latest/utils/fee.rs | 10 +- .../src/versions/vm_latest/utils/l2_blocks.rs | 4 +- core/lib/multivm/src/versions/vm_latest/vm.rs | 5 +- core/lib/multivm/src/versions/vm_m5/events.rs | 7 +- .../src/versions/vm_m5/history_recorder.rs | 3 +- .../src/versions/vm_m5/oracles/decommitter.rs | 15 +- .../src/versions/vm_m5/oracles/storage.rs | 5 +- .../src/versions/vm_m5/oracles/tracer.rs | 35 ++-- .../lib/multivm/src/versions/vm_m5/refunds.rs | 3 +- .../multivm/src/versions/vm_m5/test_utils.rs | 8 +- .../src/versions/vm_m5/transaction_data.rs | 18 +- core/lib/multivm/src/versions/vm_m5/utils.rs | 3 +- core/lib/multivm/src/versions/vm_m5/vm.rs | 3 +- .../src/versions/vm_m5/vm_with_bootloader.rs | 26 +-- core/lib/multivm/src/versions/vm_m6/events.rs | 7 +- .../src/versions/vm_m6/history_recorder.rs | 3 +- .../src/versions/vm_m6/oracles/decommitter.rs | 15 +- .../src/versions/vm_m6/oracles/storage.rs | 5 +- .../versions/vm_m6/oracles/tracer/utils.rs | 3 +- .../vm_m6/oracles/tracer/validation.rs | 33 ++-- .../lib/multivm/src/versions/vm_m6/refunds.rs | 3 +- .../multivm/src/versions/vm_m6/test_utils.rs | 8 +- .../src/versions/vm_m6/transaction_data.rs | 18 +- core/lib/multivm/src/versions/vm_m6/utils.rs | 3 +- core/lib/multivm/src/versions/vm_m6/vm.rs | 4 +- .../src/versions/vm_m6/vm_with_bootloader.rs | 29 ++- .../bootloader_state/l2_block.rs | 5 +- .../bootloader_state/utils.rs | 8 +- .../implementation/bytecode.rs | 8 +- .../vm_refunds_enhancement/old_vm/events.rs | 7 +- .../old_vm/history_recorder.rs | 3 +- .../old_vm/oracles/decommitter.rs | 7 +- .../vm_refunds_enhancement/oracles/storage.rs | 5 +- .../vm_refunds_enhancement/tracers/refunds.rs | 4 +- .../vm_refunds_enhancement/tracers/utils.rs | 3 +- .../types/internals/transaction_data.rs | 20 +- .../types/internals/vm_state.rs | 16 +- .../vm_refunds_enhancement/types/l1_batch.rs | 3 +- .../vm_refunds_enhancement/utils/fee.rs | 10 +- .../vm_refunds_enhancement/utils/l2_blocks.rs | 4 +- .../vm_refunds_enhancement/utils/overhead.rs | 3 +- .../bootloader_state/l2_block.rs | 5 +- .../bootloader_state/utils.rs | 6 +- .../implementation/bytecode.rs | 8 +- .../vm_virtual_blocks/old_vm/events.rs | 7 +- 
.../old_vm/history_recorder.rs | 3 +- .../old_vm/oracles/decommitter.rs | 7 +- .../old_vm/oracles/storage.rs | 5 +- .../vm_virtual_blocks/tracers/refunds.rs | 6 +- .../vm_virtual_blocks/tracers/utils.rs | 3 +- .../types/internals/transaction_data.rs | 20 +- .../types/internals/vm_state.rs | 16 +- .../vm_virtual_blocks/types/l1_batch_env.rs | 3 +- .../versions/vm_virtual_blocks/utils/fee.rs | 10 +- .../vm_virtual_blocks/utils/l2_blocks.rs | 4 +- .../vm_virtual_blocks/utils/overhead.rs | 3 +- core/lib/state/Cargo.toml | 1 - core/lib/state/src/storage_factory/mod.rs | 8 +- core/lib/tee_verifier/Cargo.toml | 1 - core/lib/tee_verifier/src/lib.rs | 10 +- core/lib/types/src/abi.rs | 3 +- core/lib/types/src/block.rs | 5 +- core/lib/types/src/commitment/mod.rs | 2 +- core/lib/types/src/fee.rs | 9 +- core/lib/types/src/fee_model.rs | 5 +- core/lib/types/src/l1/mod.rs | 15 +- core/lib/types/src/l2_to_l1_log.rs | 5 +- core/lib/types/src/lib.rs | 14 +- core/lib/types/src/protocol_upgrade.rs | 5 +- core/lib/types/src/snapshots.rs | 6 +- core/lib/types/src/storage/log.rs | 2 +- core/lib/types/src/storage/mod.rs | 3 +- core/lib/types/src/transaction_request.rs | 10 +- core/lib/types/src/tx/execute.rs | 7 +- core/lib/types/src/utils.rs | 5 +- core/lib/utils/Cargo.toml | 6 - core/lib/utils/src/bytecode.rs | 20 +- core/lib/utils/src/convert.rs | 185 ------------------ core/lib/utils/src/format.rs | 78 -------- core/lib/utils/src/lib.rs | 6 - core/lib/utils/src/misc.rs | 55 ------ core/lib/utils/src/time.rs | 19 -- core/lib/vm_executor/src/oneshot/block.rs | 11 +- core/lib/vm_executor/src/oneshot/mod.rs | 4 +- .../src/execution_sandbox/storage.rs | 3 +- core/node/api_server/src/testonly.rs | 3 +- core/node/api_server/src/tx_sender/mod.rs | 3 +- .../api_server/src/web3/namespaces/eth.rs | 6 +- .../api_server/src/web3/namespaces/zks.rs | 3 +- core/node/api_server/src/web3/tests/mod.rs | 10 +- core/node/api_server/src/web3/tests/vm.rs | 3 +- core/node/base_token_adjuster/Cargo.toml | 1 - core/node/commitment_generator/Cargo.toml | 1 - core/node/commitment_generator/src/lib.rs | 2 +- core/node/commitment_generator/src/utils.rs | 11 +- core/node/consensus/Cargo.toml | 1 - core/node/da_dispatcher/Cargo.toml | 1 - core/node/eth_sender/Cargo.toml | 1 - core/node/eth_sender/src/eth_tx_manager.rs | 16 +- core/node/eth_sender/src/metrics.rs | 15 +- core/node/genesis/src/lib.rs | 3 +- core/node/genesis/src/utils.rs | 7 +- core/node/metadata_calculator/Cargo.toml | 1 - .../metadata_calculator/src/api_server/mod.rs | 3 +- core/node/metadata_calculator/src/metrics.rs | 19 +- core/node/metadata_calculator/src/tests.rs | 5 +- core/node/node_sync/src/client.rs | 4 +- core/node/node_sync/src/external_io.rs | 5 +- core/node/node_sync/src/genesis.rs | 9 +- core/node/proof_data_handler/Cargo.toml | 1 - .../state_keeper/src/executor/tests/tester.rs | 2 +- core/node/state_keeper/src/io/mempool.rs | 5 +- core/node/state_keeper/src/io/persistence.rs | 8 +- .../io/seal_logic/l2_block_seal_subtasks.rs | 11 +- .../state_keeper/src/io/seal_logic/mod.rs | 2 +- core/node/state_keeper/src/io/tests/mod.rs | 7 +- core/node/state_keeper/src/mempool_actor.rs | 9 +- .../state_keeper/src/seal_criteria/mod.rs | 17 +- core/node/state_keeper/src/testonly/mod.rs | 8 +- core/node/state_keeper/src/tests/mod.rs | 16 +- core/node/state_keeper/src/utils.rs | 14 ++ core/node/test_utils/Cargo.toml | 1 - core/node/test_utils/src/lib.rs | 2 +- core/node/vm_runner/src/impls/bwip.rs | 33 +++- core/node/vm_runner/src/tests/mod.rs | 5 +- 
core/tests/test_account/src/lib.rs | 7 +- core/tests/vm-benchmark/src/vm.rs | 4 +- prover/Cargo.lock | 6 - zkstack_cli/Cargo.lock | 6 - 274 files changed, 1214 insertions(+), 1351 deletions(-) create mode 100644 core/lib/basic_types/src/conversions.rs rename core/lib/{utils => basic_types}/src/serde_wrappers.rs (97%) create mode 100644 core/lib/contracts/src/serde_bytecode.rs delete mode 100644 core/lib/utils/src/convert.rs delete mode 100644 core/lib/utils/src/format.rs delete mode 100644 core/lib/utils/src/misc.rs delete mode 100644 core/lib/utils/src/time.rs diff --git a/Cargo.lock b/Cargo.lock index 04a863448d69..bdd2f84527b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3254,7 +3254,6 @@ dependencies = [ "zksync_protobuf", "zksync_protobuf_config", "zksync_types", - "zksync_utils", ] [[package]] @@ -10443,7 +10442,6 @@ dependencies = [ "zksync_external_price_api", "zksync_node_fee_model", "zksync_types", - "zksync_utils", ] [[package]] @@ -10557,7 +10555,6 @@ dependencies = [ "zksync_node_test_utils", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_web3_decl", ] @@ -10846,6 +10843,7 @@ dependencies = [ name = "zksync_contracts" version = "0.1.0" dependencies = [ + "bincode", "envy", "ethabi", "hex", @@ -10883,7 +10881,6 @@ dependencies = [ "sha2 0.10.8", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -10965,7 +10962,6 @@ dependencies = [ "zksync_da_client", "zksync_dal", "zksync_types", - "zksync_utils", ] [[package]] @@ -11083,7 +11079,6 @@ dependencies = [ "zksync_prover_interface", "zksync_shared_metrics", "zksync_types", - "zksync_utils", ] [[package]] @@ -11169,7 +11164,6 @@ dependencies = [ "zksync_state_keeper", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vlog", "zksync_web3_decl", ] @@ -11354,7 +11348,6 @@ dependencies = [ "zksync_storage", "zksync_system_constants", "zksync_types", - "zksync_utils", ] [[package]] @@ -11389,7 +11382,6 @@ dependencies = [ "zksync_shared_metrics", "zksync_storage", "zksync_types", - "zksync_utils", ] [[package]] @@ -11530,7 +11522,6 @@ dependencies = [ "zksync_system_constants", "zksync_test_account", "zksync_types", - "zksync_utils", "zksync_vm_executor", "zksync_vm_interface", "zksync_web3_decl", @@ -11731,7 +11722,6 @@ dependencies = [ "zksync_merkle_tree", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -11795,7 +11785,6 @@ dependencies = [ "zksync_object_store", "zksync_prover_interface", "zksync_types", - "zksync_utils", "zksync_vm_executor", ] @@ -11933,7 +11922,6 @@ dependencies = [ "zksync_protobuf_config", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vlog", ] @@ -12010,7 +11998,6 @@ dependencies = [ "zksync_shared_metrics", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -12072,7 +12059,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -12116,7 +12102,6 @@ dependencies = [ "zksync_multivm", "zksync_prover_interface", "zksync_types", - "zksync_utils", ] [[package]] @@ -12174,16 +12159,10 @@ version = "0.1.0" dependencies = [ "anyhow", "assert_matches", - "bigdecimal", - "bincode", "const-decoder", "futures 0.3.31", - "hex", - "num", "once_cell", - "rand 0.8.5", "reqwest 0.12.9", - "serde", "serde_json", "thiserror", "tokio", diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 4c8f73eda94d..9979e988bbbb 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -20,7 
+20,6 @@ zksync_config.workspace = true zksync_protobuf_config.workspace = true zksync_eth_client.workspace = true zksync_storage.workspace = true -zksync_utils.workspace = true zksync_state.workspace = true zksync_contracts.workspace = true zksync_l1_contract_interface.workspace = true diff --git a/core/bin/genesis_generator/Cargo.toml b/core/bin/genesis_generator/Cargo.toml index 1ece9ea09d2e..d0bbcb668713 100644 --- a/core/bin/genesis_generator/Cargo.toml +++ b/core/bin/genesis_generator/Cargo.toml @@ -15,7 +15,6 @@ publish = false zksync_config.workspace = true zksync_env_config.workspace = true zksync_protobuf_config.workspace = true -zksync_utils.workspace = true zksync_types.workspace = true zksync_core_leftovers.workspace = true zksync_dal.workspace = true diff --git a/core/bin/system-constants-generator/src/utils.rs b/core/bin/system-constants-generator/src/utils.rs index 16167975cf0e..2c08de7970b8 100644 --- a/core/bin/system-constants-generator/src/utils.rs +++ b/core/bin/system-constants-generator/src/utils.rs @@ -22,12 +22,13 @@ use zksync_multivm::{ }; use zksync_types::{ block::L2BlockHasher, ethabi::Token, fee::Fee, fee_model::BatchFeeInput, l1::L1Tx, l2::L2Tx, - utils::storage_key_for_eth_balance, AccountTreeId, Address, Execute, K256PrivateKey, - L1BatchNumber, L1TxCommonData, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, StorageKey, - Transaction, BOOTLOADER_ADDRESS, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_GAS_PRICE_POSITION, - SYSTEM_CONTEXT_TX_ORIGIN_POSITION, U256, ZKPORTER_IS_AVAILABLE, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, Execute, + K256PrivateKey, L1BatchNumber, L1TxCommonData, L2BlockNumber, L2ChainId, Nonce, + ProtocolVersionId, StorageKey, Transaction, BOOTLOADER_ADDRESS, SYSTEM_CONTEXT_ADDRESS, + SYSTEM_CONTEXT_GAS_PRICE_POSITION, SYSTEM_CONTEXT_TX_ORIGIN_POSITION, U256, + ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use crate::intrinsic_costs::VmSpentResourcesResult; @@ -65,7 +66,7 @@ pub static GAS_TEST_SYSTEM_CONTRACTS: Lazy = Lazy::new(|| { let hash = hash_bytecode(&bytecode); let bootloader = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -74,7 +75,7 @@ pub static GAS_TEST_SYSTEM_CONTRACTS: Lazy = Lazy::new(|| { BaseSystemContracts { default_aa: SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }, bootloader, @@ -214,7 +215,7 @@ pub(super) fn execute_internal_transfer_test() -> u32 { let bytecode = read_bootloader_test_code("transfer_test"); let hash = hash_bytecode(&bytecode); let bootloader = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -223,7 +224,7 @@ pub(super) fn execute_internal_transfer_test() -> u32 { let bytecode = read_sys_contract_bytecode("", "DefaultAccount", ContractLanguage::Sol); let hash = hash_bytecode(&bytecode); let default_aa = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -263,7 +264,11 @@ pub(super) fn execute_internal_transfer_test() -> u32 { } input }; - let input: Vec<_> = bytes_to_be_words(input).into_iter().enumerate().collect(); + let input: Vec<_> = input + .chunks(32) + .map(U256::from_big_endian) + .enumerate() + .collect(); let tracer_result = Rc::new(RefCell::new(0)); let tracer = SpecialBootloaderTracer { diff --git a/core/bin/zksync_server/Cargo.toml b/core/bin/zksync_server/Cargo.toml index 031183924064..4cf028be8210 
100644 --- a/core/bin/zksync_server/Cargo.toml +++ b/core/bin/zksync_server/Cargo.toml @@ -17,7 +17,6 @@ zksync_env_config.workspace = true zksync_eth_client.workspace = true zksync_protobuf_config.workspace = true zksync_storage.workspace = true -zksync_utils.workspace = true zksync_types.workspace = true zksync_core_leftovers.workspace = true zksync_node_genesis.workspace = true diff --git a/core/lib/basic_types/src/conversions.rs b/core/lib/basic_types/src/conversions.rs new file mode 100644 index 000000000000..544d4adc08f8 --- /dev/null +++ b/core/lib/basic_types/src/conversions.rs @@ -0,0 +1,36 @@ +//! Conversions between basic types. + +use crate::{Address, H256, U256}; + +pub fn h256_to_u256(num: H256) -> U256 { + U256::from_big_endian(num.as_bytes()) +} + +pub fn address_to_h256(address: &Address) -> H256 { + let mut buffer = [0u8; 32]; + buffer[12..].copy_from_slice(address.as_bytes()); + H256(buffer) +} + +pub fn address_to_u256(address: &Address) -> U256 { + h256_to_u256(address_to_h256(address)) +} + +pub fn u256_to_h256(num: U256) -> H256 { + let mut bytes = [0u8; 32]; + num.to_big_endian(&mut bytes); + H256::from_slice(&bytes) +} + +/// Converts `U256` value into an [`Address`]. +pub fn u256_to_address(value: &U256) -> Address { + let mut bytes = [0u8; 32]; + value.to_big_endian(&mut bytes); + + Address::from_slice(&bytes[12..]) +} + +/// Converts `H256` value into an [`Address`]. +pub fn h256_to_address(value: &H256) -> Address { + Address::from_slice(&value.as_bytes()[12..]) +} diff --git a/core/lib/basic_types/src/lib.rs b/core/lib/basic_types/src/lib.rs index 1b462fdf77d1..5776416265d2 100644 --- a/core/lib/basic_types/src/lib.rs +++ b/core/lib/basic_types/src/lib.rs @@ -22,21 +22,32 @@ pub use ethabi::{ }; use serde::{de, Deserialize, Deserializer, Serialize}; +pub use self::conversions::{ + address_to_h256, address_to_u256, h256_to_address, h256_to_u256, u256_to_address, u256_to_h256, +}; + #[macro_use] mod macros; pub mod basic_fri_types; pub mod commitment; +mod conversions; pub mod network; pub mod protocol_version; pub mod prover_dal; pub mod pubdata_da; pub mod secrets; +pub mod serde_wrappers; pub mod settlement; pub mod tee_types; pub mod url; pub mod vm; pub mod web3; +/// Computes `ceil(a / b)`. +pub fn ceil_div_u256(a: U256, b: U256) -> U256 { + (a + b - U256::from(1)) / b +} + /// Parses H256 from a slice of bytes. pub fn parse_h256(bytes: &[u8]) -> anyhow::Result { Ok(<[u8; 32]>::try_from(bytes).context("invalid size")?.into()) diff --git a/core/lib/utils/src/serde_wrappers.rs b/core/lib/basic_types/src/serde_wrappers.rs similarity index 97% rename from core/lib/utils/src/serde_wrappers.rs rename to core/lib/basic_types/src/serde_wrappers.rs index cb9687a8a504..4cc470493dce 100644 --- a/core/lib/utils/src/serde_wrappers.rs +++ b/core/lib/basic_types/src/serde_wrappers.rs @@ -1,3 +1,5 @@ +//! Generic `serde` helpers. 
+ use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; /// Trait for specifying prefix for bytes to hex serialization @@ -61,9 +63,7 @@ pub type ZeroPrefixHexSerde = BytesToHexSerde; #[cfg(test)] mod tests { - use serde::{Deserialize, Serialize}; - - use crate::ZeroPrefixHexSerde; + use super::*; #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Execute { diff --git a/core/lib/basic_types/src/web3/mod.rs b/core/lib/basic_types/src/web3/mod.rs index aa7c49670333..e6d3cab37273 100644 --- a/core/lib/basic_types/src/web3/mod.rs +++ b/core/lib/basic_types/src/web3/mod.rs @@ -73,6 +73,14 @@ pub fn keccak256(bytes: &[u8]) -> [u8; 32] { output } +/// Hashes concatenation of the two provided hashes using `keccak256`. +pub fn keccak256_concat(hash1: H256, hash2: H256) -> H256 { + let mut bytes = [0_u8; 64]; + bytes[..32].copy_from_slice(hash1.as_bytes()); + bytes[32..].copy_from_slice(hash2.as_bytes()); + H256(keccak256(&bytes)) +} + // `Bytes`: from `web3::types::bytes` /// Raw bytes wrapper diff --git a/core/lib/constants/Cargo.toml b/core/lib/constants/Cargo.toml index b741b5734902..bc4d1f7bb57f 100644 --- a/core/lib/constants/Cargo.toml +++ b/core/lib/constants/Cargo.toml @@ -12,6 +12,5 @@ categories.workspace = true [dependencies] zksync_basic_types.workspace = true -zksync_utils.workspace = true once_cell.workspace = true diff --git a/core/lib/constants/src/trusted_slots.rs b/core/lib/constants/src/trusted_slots.rs index e5a626d49036..d66b2bfd4729 100644 --- a/core/lib/constants/src/trusted_slots.rs +++ b/core/lib/constants/src/trusted_slots.rs @@ -1,6 +1,5 @@ use once_cell::sync::Lazy; -use zksync_basic_types::{H256, U256}; -use zksync_utils::h256_to_u256; +use zksync_basic_types::{h256_to_u256, H256, U256}; /// /// Well known-slots (e.g. proxy addresses in popular EIPs). 
diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index 7caa5f32c991..15951e578ff0 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -7,6 +7,7 @@ use tokio::sync::watch; use zksync_dal::Connection; use zksync_node_test_utils::{create_l1_batch, create_l2_block}; use zksync_types::{ + address_to_h256, contract_verification_api::{CompilerVersions, SourceCodeData, VerificationIncomingRequest}, get_code_key, get_known_code_key, l2::L2Tx, @@ -14,10 +15,7 @@ use zksync_types::{ Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, StorageLog, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, - bytecode::{hash_bytecode, hash_evm_bytecode}, -}; +use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode}; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics, VmEvent}; use super::*; diff --git a/core/lib/contracts/Cargo.toml b/core/lib/contracts/Cargo.toml index 2b80295cf440..efe37b301e28 100644 --- a/core/lib/contracts/Cargo.toml +++ b/core/lib/contracts/Cargo.toml @@ -19,3 +19,6 @@ serde.workspace = true once_cell.workspace = true hex.workspace = true envy.workspace = true + +[dev-dependencies] +bincode.workspace = true diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index cb5be504c8a0..ad9f7739ba0d 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -10,14 +10,12 @@ use std::{ path::{Path, PathBuf}, }; -use ethabi::{ - ethereum_types::{H256, U256}, - Contract, Event, Function, -}; +use ethabi::{ethereum_types::H256, Contract, Event, Function}; use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, env::Workspace}; +use zksync_utils::{bytecode::hash_bytecode, env::Workspace}; +mod serde_bytecode; pub mod test_contracts; #[derive(Debug, Clone)] @@ -379,7 +377,8 @@ fn read_zbin_bytecode_from_hex_file(bytecode_path: PathBuf) -> Vec<u8> { /// Hash of code and code which consists of 32 bytes words #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SystemContractCode { - pub code: Vec<U256>, + #[serde(with = "serde_bytecode")] + pub code: Vec<u8>, pub hash: H256, } @@ -410,17 +409,15 @@ impl PartialEq for BaseSystemContracts { impl BaseSystemContracts { fn load_with_bootloader(bootloader_bytecode: Vec<u8>) -> Self { let hash = hash_bytecode(&bootloader_bytecode); - let bootloader = SystemContractCode { - code: bytes_to_be_words(bootloader_bytecode), + code: bootloader_bytecode, hash, }; let bytecode = read_sys_contract_bytecode("", "DefaultAccount", ContractLanguage::Sol); let hash = hash_bytecode(&bytecode); - let default_aa = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -442,7 +439,7 @@ impl BaseSystemContracts { let bytecode = read_sys_contract_bytecode("", "EvmEmulator", ContractLanguage::Yul); let hash = hash_bytecode(&bytecode); self.evm_emulator = Some(SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }); self diff --git a/core/lib/contracts/src/serde_bytecode.rs b/core/lib/contracts/src/serde_bytecode.rs new file mode 100644 index 000000000000..43de12e8ddd1 --- /dev/null +++ b/core/lib/contracts/src/serde_bytecode.rs @@ -0,0 +1,112 @@ +use std::fmt; + +use ethabi::ethereum_types::U256; + +use serde::{de, de::SeqAccess, ser, ser::SerializeSeq, Deserializer, Serializer}; + +pub(super) fn serialize<S: Serializer>(bytes: &[u8], serializer: S) ->
Result<S::Ok, S::Error> { + if bytes.len() % 32 != 0 { + return Err(ser::Error::custom("bytecode length is not divisible by 32")); + } + let mut seq = serializer.serialize_seq(Some(bytes.len() / 32))?; + for chunk in bytes.chunks(32) { + let word = U256::from_big_endian(chunk); + seq.serialize_element(&word)?; + } + seq.end() +} + +#[derive(Debug)] +struct SeqVisitor; + +impl<'de> de::Visitor<'de> for SeqVisitor { + type Value = Vec<u8>; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "sequence of `U256` words") + } + + fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> { + let len = seq.size_hint().unwrap_or(0) * 32; + let mut bytes = Vec::with_capacity(len); + while let Some(value) = seq.next_element::<U256>()? { + let prev_len = bytes.len(); + bytes.resize(prev_len + 32, 0); + value.to_big_endian(&mut bytes[prev_len..]); + } + Ok(bytes) + } +} + +pub(super) fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> { + deserializer.deserialize_seq(SeqVisitor) +} + +#[cfg(test)] +mod tests { + use ethabi::ethereum_types::{H256, U256}; + use serde::{Deserialize, Serialize}; + + use crate::SystemContractCode; + + /// Code with legacy serialization logic. + #[derive(Debug, Serialize, Deserialize)] + struct LegacySystemContractCode { + code: Vec<U256>, + hash: H256, + } + + impl From<&SystemContractCode> for LegacySystemContractCode { + fn from(value: &SystemContractCode) -> Self { + Self { + code: value.code.chunks(32).map(U256::from_big_endian).collect(), + hash: value.hash, + } + } + } + + fn test_code() -> SystemContractCode { + let mut code = vec![0; 32]; + code.extend_from_slice(&[0; 30]); + code.extend_from_slice(&[0xab, 0xcd]); + code.extend_from_slice(&[0x23; 32]); + + SystemContractCode { + hash: H256::repeat_byte(0x42), + code, + } + } + + #[test] + fn serializing_system_contract_code() { + let system_contract_code = test_code(); + let json = serde_json::to_value(&system_contract_code).unwrap(); + assert_eq!( + json, + serde_json::json!({ + "code": ["0x0", "0xabcd", "0x2323232323232323232323232323232323232323232323232323232323232323"], + "hash": "0x4242424242424242424242424242424242424242424242424242424242424242", + }) + ); + + let legacy_code = LegacySystemContractCode::from(&system_contract_code); + let legacy_json = serde_json::to_value(&legacy_code).unwrap(); + assert_eq!(legacy_json, json); + + let restored: SystemContractCode = serde_json::from_value(json).unwrap(); + assert_eq!(restored.code, system_contract_code.code); + assert_eq!(restored.hash, system_contract_code.hash); + } + + #[test] + fn serializing_system_contract_code_using_bincode() { + let system_contract_code = test_code(); + let bytes = bincode::serialize(&system_contract_code).unwrap(); + let restored: SystemContractCode = bincode::deserialize(&bytes).unwrap(); + assert_eq!(restored.code, system_contract_code.code); + assert_eq!(restored.hash, system_contract_code.hash); + + let legacy_code = LegacySystemContractCode::from(&system_contract_code); + let legacy_bytes = bincode::serialize(&legacy_code).unwrap(); + assert_eq!(legacy_bytes, bytes); + } +} diff --git a/core/lib/crypto_primitives/Cargo.toml b/core/lib/crypto_primitives/Cargo.toml index 7efe5279b598..651609ec7949 100644 --- a/core/lib/crypto_primitives/Cargo.toml +++ b/core/lib/crypto_primitives/Cargo.toml @@ -15,7 +15,6 @@ categories.workspace = true secp256k1 = { workspace = true, features = ["global-context"] } sha2.workspace = true blake2.workspace = true -zksync_utils.workspace = true zksync_basic_types.workspace =
true thiserror.workspace = true serde_json.workspace = true diff --git a/core/lib/crypto_primitives/src/packed_eth_signature.rs b/core/lib/crypto_primitives/src/packed_eth_signature.rs index 3d76de73560e..c4a26bf351b4 100644 --- a/core/lib/crypto_primitives/src/packed_eth_signature.rs +++ b/core/lib/crypto_primitives/src/packed_eth_signature.rs @@ -1,7 +1,6 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer}; use thiserror::Error; -use zksync_basic_types::{web3::keccak256, Address, H256}; -use zksync_utils::ZeroPrefixHexSerde; +use zksync_basic_types::{serde_wrappers::ZeroPrefixHexSerde, web3::keccak256, Address, H256}; use crate::{ ecdsa_signature::{ diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs index ba843bbf92f3..4699eac4e5eb 100644 --- a/core/lib/dal/src/blocks_web3_dal.rs +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -11,12 +11,11 @@ use zksync_types::{ web3::{BlockHeader, Bytes}, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, H160, H256, U256, U64, }; -use zksync_utils::bigdecimal_to_u256; use zksync_vm_interface::Call; use crate::{ models::{ - parse_protocol_version, + bigdecimal_to_u256, parse_protocol_version, storage_block::{ ResolvedL1BatchForL2Block, StorageBlockDetails, StorageL1BatchDetails, LEGACY_BLOCK_GAS_LIMIT, diff --git a/core/lib/dal/src/consensus/conv.rs b/core/lib/dal/src/consensus/conv.rs index f0948adfd1da..3153343d6014 100644 --- a/core/lib/dal/src/consensus/conv.rs +++ b/core/lib/dal/src/consensus/conv.rs @@ -8,15 +8,15 @@ use zksync_types::{ commitment::{L1BatchCommitmentMode, PubdataParams}, ethabi, fee::Fee, + h256_to_u256, l1::{OpProcessingType, PriorityQueueType}, l2::TransactionType, parse_h160, parse_h256, protocol_upgrade::ProtocolUpgradeTxCommonData, transaction_request::PaymasterParams, - Execute, ExecuteTransactionCommon, InputData, L1BatchNumber, L1TxCommonData, L2TxCommonData, - Nonce, PriorityOpId, ProtocolVersionId, Transaction, H256, + u256_to_h256, Execute, ExecuteTransactionCommon, InputData, L1BatchNumber, L1TxCommonData, + L2TxCommonData, Nonce, PriorityOpId, ProtocolVersionId, Transaction, H256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use super::*; diff --git a/core/lib/dal/src/contract_verification_dal.rs b/core/lib/dal/src/contract_verification_dal.rs index 93a4ce2fd35a..0708063dff60 100644 --- a/core/lib/dal/src/contract_verification_dal.rs +++ b/core/lib/dal/src/contract_verification_dal.rs @@ -8,13 +8,13 @@ use std::{ use sqlx::postgres::types::PgInterval; use zksync_db_connection::{error::SqlxContext, instrument::InstrumentExt}; use zksync_types::{ + address_to_h256, contract_verification_api::{ VerificationIncomingRequest, VerificationInfo, VerificationRequest, VerificationRequestStatus, }, web3, Address, CONTRACT_DEPLOYER_ADDRESS, H256, }; -use zksync_utils::address_to_h256; use zksync_vm_interface::VmEvent; use crate::{ diff --git a/core/lib/dal/src/factory_deps_dal.rs b/core/lib/dal/src/factory_deps_dal.rs index 857e2973ae33..424d708da241 100644 --- a/core/lib/dal/src/factory_deps_dal.rs +++ b/core/lib/dal/src/factory_deps_dal.rs @@ -4,7 +4,6 @@ use anyhow::Context as _; use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; use zksync_types::{L2BlockNumber, H256, U256}; -use zksync_utils::{bytes_to_be_words, bytes_to_chunks}; use crate::Core; @@ -102,7 +101,7 @@ impl FactoryDepsDal<'_, '_> { .context("failed loading bootloader code")? 
.with_context(|| format!("bootloader code with hash {bootloader_hash:?} should be present in the database"))?; let bootloader_code = SystemContractCode { - code: bytes_to_be_words(bootloader_bytecode), + code: bootloader_bytecode, hash: bootloader_hash, }; @@ -113,7 +112,7 @@ impl FactoryDepsDal<'_, '_> { .with_context(|| format!("default account code with hash {default_aa_hash:?} should be present in the database"))?; let default_aa_code = SystemContractCode { - code: bytes_to_be_words(default_aa_bytecode), + code: default_aa_bytecode, hash: default_aa_hash, }; @@ -125,7 +124,7 @@ impl FactoryDepsDal<'_, '_> { .with_context(|| format!("EVM emulator code with hash {evm_emulator_hash:?} should be present in the database"))?; Some(SystemContractCode { - code: bytes_to_be_words(evm_emulator_bytecode), + code: evm_emulator_bytecode, hash: evm_emulator_hash, }) } else { @@ -140,10 +139,7 @@ impl FactoryDepsDal<'_, '_> { } /// Returns bytecodes for factory deps with the specified `hashes`. - pub async fn get_factory_deps( - &mut self, - hashes: &HashSet<H256>, - ) -> HashMap<U256, Vec<u8>> { + pub async fn get_factory_deps(&mut self, hashes: &HashSet<H256>) -> HashMap<U256, Vec<u8>> { let hashes_as_bytes: Vec<_> = hashes.iter().map(H256::as_bytes).collect(); sqlx::query!( @@ -162,12 +158,7 @@ impl FactoryDepsDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| { - ( - U256::from_big_endian(&row.bytecode_hash), - bytes_to_chunks(&row.bytecode), - ) - }) + .map(|row| (U256::from_big_endian(&row.bytecode_hash), row.bytecode)) .collect() } diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs index 12e41ac780ad..885dcd46f41f 100644 --- a/core/lib/dal/src/models/mod.rs +++ b/core/lib/dal/src/models/mod.rs @@ -1,6 +1,8 @@ pub mod storage_block; + +use bigdecimal::{num_bigint::BigUint, BigDecimal}; use zksync_db_connection::error::SqlxContext; -use zksync_types::ProtocolVersionId; +use zksync_types::{ProtocolVersionId, U256}; mod call; pub mod storage_base_token_ratio; @@ -24,3 +26,26 @@ pub(crate) fn parse_protocol_version(raw: i32) -> sqlx::Result<ProtocolVersionId> { + +pub(crate) fn u256_to_big_decimal(value: U256) -> BigDecimal { + let mut u32_digits = vec![0_u32; 8]; + // `u64_digit`s from `U256` are little-endian + for (i, &u64_digit) in value.0.iter().enumerate() { + u32_digits[2 * i] = u64_digit as u32; + u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; + } + let value = BigUint::new(u32_digits); + BigDecimal::new(value.into(), 0) +} + +/// Converts `BigUint` value into the corresponding `U256` value. +fn biguint_to_u256(value: BigUint) -> U256 { + let bytes = value.to_bytes_le(); + U256::from_little_endian(&bytes) +} + +/// Converts `BigDecimal` value into the corresponding `U256` value.
+pub(crate) fn bigdecimal_to_u256(value: BigDecimal) -> U256 { + let bigint = value.with_scale(0).into_bigint_and_exponent().0; + biguint_to_u256(bigint.to_biguint().unwrap()) +} diff --git a/core/lib/dal/src/models/storage_transaction.rs b/core/lib/dal/src/models/storage_transaction.rs index b55126800cce..cceebc85cf2b 100644 --- a/core/lib/dal/src/models/storage_transaction.rs +++ b/core/lib/dal/src/models/storage_transaction.rs @@ -6,6 +6,7 @@ use sqlx::types::chrono::{DateTime, NaiveDateTime, Utc}; use zksync_types::{ api::{self, TransactionDetails, TransactionReceipt, TransactionStatus}, fee::Fee, + h256_to_address, l1::{OpProcessingType, PriorityQueueType}, l2::TransactionType, protocol_upgrade::ProtocolUpgradeTxCommonData, @@ -16,11 +17,10 @@ use zksync_types::{ TransactionTimeRangeConstraint, EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE, H160, H256, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, U256, U64, }; -use zksync_utils::{bigdecimal_to_u256, h256_to_account_address}; use zksync_vm_interface::Call; use super::call::{LegacyCall, LegacyMixedCall}; -use crate::BigDecimal; +use crate::{models::bigdecimal_to_u256, BigDecimal}; #[derive(Debug, Clone, sqlx::FromRow)] #[cfg_attr(test, derive(Default))] @@ -403,7 +403,7 @@ impl From for TransactionReceipt { ), contract_address: storage_receipt .contract_address - .map(|addr| h256_to_account_address(&H256::from_slice(&addr))), + .map(|addr| h256_to_address(&H256::from_slice(&addr))), logs: vec![], l2_to_l1_logs: vec![], status, diff --git a/core/lib/dal/src/models/tests.rs b/core/lib/dal/src/models/tests.rs index b4949dc101d6..c30c84702b13 100644 --- a/core/lib/dal/src/models/tests.rs +++ b/core/lib/dal/src/models/tests.rs @@ -1,4 +1,6 @@ +use bigdecimal::num_bigint::BigInt; use chrono::Utc; +use rand::{prelude::StdRng, Rng, SeedableRng}; use zksync_types::{ fee::Fee, l1::{OpProcessingType, PriorityQueueType}, @@ -7,9 +9,9 @@ use zksync_types::{ Address, Execute, ExecuteTransactionCommon, Transaction, EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE, H160, H256, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, U256, }; -use zksync_utils::bigdecimal_to_u256; -use crate::{models::storage_transaction::StorageTransaction, BigDecimal}; +use super::*; +use crate::models::storage_transaction::StorageTransaction; fn default_execute() -> Execute { Execute { @@ -96,6 +98,49 @@ fn l2_storage_tx(tx_format: i32) -> StorageTransaction { } } +#[test] +fn test_u256_to_bigdecimal() { + const RNG_SEED: u64 = 123; + + let mut rng = StdRng::seed_from_u64(RNG_SEED); + // Small values. 
+ for _ in 0..10_000 { + let value: u64 = rng.gen(); + let expected = BigDecimal::from(value); + assert_eq!(u256_to_big_decimal(value.into()), expected); + } + + // Arbitrary values + for _ in 0..10_000 { + let u64_digits: [u64; 4] = rng.gen(); + let value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| U256::from(digit) << (i * 64)) + .fold(U256::zero(), |acc, x| acc + x); + let expected_value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| BigInt::from(digit) << (i * 64)) + .fold(BigInt::from(0), |acc, x| acc + x); + assert_eq!( + u256_to_big_decimal(value), + BigDecimal::new(expected_value, 0) + ); + } +} + +#[test] +fn test_bigdecimal_to_u256() { + let value = BigDecimal::from(100u32); + let expected = U256::from(100u32); + assert_eq!(bigdecimal_to_u256(value), expected); + + let value = BigDecimal::new(BigInt::from(100), -2); + let expected = U256::from(10000u32); + assert_eq!(bigdecimal_to_u256(value), expected); +} + #[test] fn storage_tx_to_l1_tx() { let stx = l1_storage_tx(); diff --git a/core/lib/dal/src/storage_web3_dal.rs b/core/lib/dal/src/storage_web3_dal.rs index 10d2cfe61525..794f49c59ac4 100644 --- a/core/lib/dal/src/storage_web3_dal.rs +++ b/core/lib/dal/src/storage_web3_dal.rs @@ -6,12 +6,11 @@ use zksync_db_connection::{ instrument::{InstrumentExt, Instrumented}, }; use zksync_types::{ - get_code_key, get_nonce_key, + get_code_key, get_nonce_key, h256_to_u256, utils::{decompose_full_nonce, storage_key_for_standard_token_balance}, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, Nonce, StorageKey, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, H256, U256, }; -use zksync_utils::h256_to_u256; use crate::{models::storage_block::ResolvedL1BatchForL2Block, Core, CoreDal}; diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs index 9c0889ebfc75..a5dfb8932ddb 100644 --- a/core/lib/dal/src/transactions_dal.rs +++ b/core/lib/dal/src/transactions_dal.rs @@ -15,15 +15,15 @@ use zksync_types::{ L1BlockNumber, L2BlockNumber, PriorityOpId, ProtocolVersionId, Transaction, TransactionTimeRangeConstraint, H256, PROTOCOL_UPGRADE_TX_TYPE, U256, }; -use zksync_utils::u256_to_big_decimal; use zksync_vm_interface::{ tracer::ValidationTraces, Call, TransactionExecutionMetrics, TransactionExecutionResult, TxExecutionStatus, }; use crate::{ - models::storage_transaction::{ - parse_call_trace, serialize_call_into_bytes, StorageTransaction, + models::{ + storage_transaction::{parse_call_trace, serialize_call_into_bytes, StorageTransaction}, + u256_to_big_decimal, }, Core, CoreDal, }; diff --git a/core/lib/merkle_tree/Cargo.toml b/core/lib/merkle_tree/Cargo.toml index 579350bccf4e..e615258ba646 100644 --- a/core/lib/merkle_tree/Cargo.toml +++ b/core/lib/merkle_tree/Cargo.toml @@ -16,7 +16,6 @@ zksync_types.workspace = true zksync_crypto_primitives.workspace = true zksync_storage.workspace = true zksync_prover_interface.workspace = true -zksync_utils.workspace = true anyhow.workspace = true leb128.workspace = true diff --git a/core/lib/multivm/src/glue/types/vm/block_context_mode.rs b/core/lib/multivm/src/glue/types/vm/block_context_mode.rs index 094339705e14..66634e504386 100644 --- a/core/lib/multivm/src/glue/types/vm/block_context_mode.rs +++ b/core/lib/multivm/src/glue/types/vm/block_context_mode.rs @@ -1,4 +1,4 @@ -use zksync_utils::h256_to_u256; +use zksync_types::h256_to_u256; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/glue/types/vm/storage_log.rs b/core/lib/multivm/src/glue/types/vm/storage_log.rs index 
322bc491e9ab..5f79ca9e9e15 100644 --- a/core/lib/multivm/src/glue/types/vm/storage_log.rs +++ b/core/lib/multivm/src/glue/types/vm/storage_log.rs @@ -1,7 +1,6 @@ use zksync_types::{ - zk_evm_types::LogQuery, StorageLog, StorageLogQuery, StorageLogWithPreviousValue, + u256_to_h256, zk_evm_types::LogQuery, StorageLog, StorageLogQuery, StorageLogWithPreviousValue, }; -use zksync_utils::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs b/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs index dfe1121c04ec..08556b7b901a 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs b/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs index 4c554c1bd53d..ab13de140cfb 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs @@ -2,8 +2,10 @@ use zk_evm_1_3_3::{ aux_structures::{LogQuery as LogQuery_1_3_3, Timestamp as Timestamp_1_3_3}, zkevm_opcode_defs::FarCallOpcode as FarCallOpcode_1_3_3, }; -use zksync_types::zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + u256_to_h256, + zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}, +}; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs b/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs index 5af0e57c4bf9..c25a19b1aa3d 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs b/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs index 933eafbb0354..6a8138bc2f24 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs @@ -2,8 +2,10 @@ use zk_evm_1_4_1::{ aux_structures::{LogQuery as LogQuery_1_4_1, Timestamp as Timestamp_1_4_1}, zkevm_opcode_defs::FarCallOpcode as FarCallOpcode_1_4_1, }; -use zksync_types::zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + u256_to_h256, + zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}, +}; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs b/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs index eb1c8e1dd7e8..343843503bdd 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/pubdata_builders/tests.rs b/core/lib/multivm/src/pubdata_builders/tests.rs index bc24b8e47346..b06cb9405aa7 100644 --- a/core/lib/multivm/src/pubdata_builders/tests.rs +++ b/core/lib/multivm/src/pubdata_builders/tests.rs @@ -1,8 +1,7 @@ use zksync_types::{ - writes::StateDiffRecord, Address, ProtocolVersionId, ACCOUNT_CODE_STORAGE_ADDRESS, - BOOTLOADER_ADDRESS, + u256_to_h256, writes::StateDiffRecord, Address, ProtocolVersionId, + ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, }; -use zksync_utils::u256_to_h256; use super::{rollup::RollupPubdataBuilder, validium::ValidiumPubdataBuilder}; use 
crate::interface::pubdata::{L1MessengerL2ToL1Log, PubdataBuilder, PubdataInput}; diff --git a/core/lib/multivm/src/tracers/prestate_tracer/mod.rs b/core/lib/multivm/src/tracers/prestate_tracer/mod.rs index e8a7cc2cc420..363480c016bf 100644 --- a/core/lib/multivm/src/tracers/prestate_tracer/mod.rs +++ b/core/lib/multivm/src/tracers/prestate_tracer/mod.rs @@ -2,10 +2,9 @@ use std::{collections::HashMap, fmt, sync::Arc}; use once_cell::sync::OnceCell; use zksync_types::{ - get_code_key, get_nonce_key, web3::keccak256, AccountTreeId, Address, StorageKey, StorageValue, - H160, H256, L2_BASE_TOKEN_ADDRESS, U256, + address_to_h256, get_code_key, get_nonce_key, h256_to_u256, web3::keccak256, AccountTreeId, + Address, StorageKey, StorageValue, H160, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{address_to_h256, h256_to_u256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/tracers/validator/mod.rs b/core/lib/multivm/src/tracers/validator/mod.rs index a095be9f3748..88249467a575 100644 --- a/core/lib/multivm/src/tracers/validator/mod.rs +++ b/core/lib/multivm/src/tracers/validator/mod.rs @@ -10,9 +10,9 @@ use zksync_system_constants::{ L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; use zksync_types::{ - vm::VmVersion, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256, + address_to_u256, u256_to_h256, vm::VmVersion, web3::keccak256, AccountTreeId, Address, + StorageKey, H256, U256, }; -use zksync_utils::{address_to_u256, be_bytes_to_safe_address, u256_to_h256}; use zksync_vm_interface::{ tracer::{TimestampAsserterParams, ValidationTraces}, L1BatchEnv, @@ -25,6 +25,7 @@ use crate::{ storage::{StoragePtr, WriteStorage}, tracer::{ValidationParams, ViolatedValidationRule}, }, + utils::bytecode::be_bytes_to_safe_address, }; mod types; diff --git a/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs b/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs index d1ddb2b44c80..3b5636c1c528 100644 --- a/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs b/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs index a51644ff9ea2..0a48792aaa9e 100644 --- a/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs 
@@ -3,8 +3,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs b/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs index 7f9767a5e632..da6ffd4948cf 100644 --- a/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_4_0::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs index d3dc7fd87c42..3c819384137f 100644 --- a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs @@ -3,8 +3,10 @@ use zk_evm_1_5_0::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256, U256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, + U256, +}; use crate::{ interface::{ @@ -48,7 +50,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = 
u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -161,7 +163,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs b/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs index 0badd7c58775..ea95c567181e 100644 --- a/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs b/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs index 86a639915c9d..94f31ddf138d 100644 --- a/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/utils/bytecode.rs b/core/lib/multivm/src/utils/bytecode.rs index 
260749b44f3c..c1937e992990 100644 --- a/core/lib/multivm/src/utils/bytecode.rs +++ b/core/lib/multivm/src/utils/bytecode.rs @@ -1,10 +1,51 @@ use std::collections::HashMap; -use zksync_types::ethabi::{self, Token}; +use zksync_types::{ + ethabi::{self, Token}, + Address, H256, U256, +}; use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; use crate::interface::CompressedBytecodeInfo; +pub(crate) fn be_chunks_to_h256_words(chunks: Vec<[u8; 32]>) -> Vec<H256> { + chunks.into_iter().map(|el| H256::from_slice(&el)).collect() +} + +pub(crate) fn be_words_to_bytes(words: &[U256]) -> Vec<u8> { + words + .iter() + .flat_map(|w| { + let mut bytes = [0u8; 32]; + w.to_big_endian(&mut bytes); + bytes + }) + .collect() +} + +pub(crate) fn bytes_to_be_words(bytes: &[u8]) -> Vec<U256> { + assert_eq!( + bytes.len() % 32, + 0, + "Bytes must be divisible by 32 to split into chunks" + ); + bytes.chunks(32).map(U256::from_big_endian).collect() +} + +pub(crate) fn be_bytes_to_safe_address(bytes: &[u8]) -> Option<Address>
{ + if bytes.len() < 20 { + return None; + } + + let (zero_bytes, address_bytes) = bytes.split_at(bytes.len() - 20); + + if zero_bytes.iter().any(|b| *b != 0) { + None + } else { + Some(Address::from_slice(address_bytes)) + } +} + #[derive(Debug, thiserror::Error)] pub(crate) enum FailedToCompressBytecodeError { #[error("Number of unique 8-bytes bytecode chunks exceed the limit of 2^16 - 1")] diff --git a/core/lib/multivm/src/utils/deduplicator.rs b/core/lib/multivm/src/utils/deduplicator.rs index e9a870e6901d..0cb4c3fa7cd8 100644 --- a/core/lib/multivm/src/utils/deduplicator.rs +++ b/core/lib/multivm/src/utils/deduplicator.rs @@ -1,10 +1,9 @@ use std::collections::HashMap; use zksync_types::{ - writes::compression::compress_with_best_strategy, StorageKey, StorageLogKind, + h256_to_u256, writes::compression::compress_with_best_strategy, StorageKey, StorageLogKind, StorageLogWithPreviousValue, H256, }; -use zksync_utils::h256_to_u256; use crate::interface::DeduplicatedWritesMetrics; @@ -211,8 +210,7 @@ impl StorageWritesDeduplicator { #[cfg(test)] mod tests { - use zksync_types::{AccountTreeId, StorageLog, H160, U256}; - use zksync_utils::u256_to_h256; + use zksync_types::{u256_to_h256, AccountTreeId, StorageLog, H160, U256}; use super::*; diff --git a/core/lib/multivm/src/utils/events.rs b/core/lib/multivm/src/utils/events.rs index d84651989e75..37124b822040 100644 --- a/core/lib/multivm/src/utils/events.rs +++ b/core/lib/multivm/src/utils/events.rs @@ -93,8 +93,7 @@ mod tests { use zksync_system_constants::{ BOOTLOADER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L2_BASE_TOKEN_ADDRESS, }; - use zksync_types::{Address, L1BatchNumber}; - use zksync_utils::u256_to_h256; + use zksync_types::{u256_to_h256, Address, L1BatchNumber}; use super::*; diff --git a/core/lib/multivm/src/versions/testonly/block_tip.rs b/core/lib/multivm/src/versions/testonly/block_tip.rs index 220653308a7e..61678e01a443 100644 --- a/core/lib/multivm/src/versions/testonly/block_tip.rs +++ b/core/lib/multivm/src/versions/testonly/block_tip.rs @@ -6,9 +6,9 @@ use zksync_system_constants::{ }; use zksync_types::{ commitment::SerializeCommitment, fee_model::BatchFeeInput, get_code_key, - l2_to_l1_log::L2ToL1Log, writes::StateDiffRecord, Address, Execute, H256, U256, + l2_to_l1_log::L2ToL1Log, u256_to_h256, writes::StateDiffRecord, Address, Execute, H256, U256, }; -use zksync_utils::{bytecode::hash_bytecode, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use super::{ default_pubdata_builder, get_complex_upgrade_abi, get_empty_storage, read_complex_upgrade, diff --git a/core/lib/multivm/src/versions/testonly/code_oracle.rs b/core/lib/multivm/src/versions/testonly/code_oracle.rs index 767a294f44ab..6a1fcfb28df3 100644 --- a/core/lib/multivm/src/versions/testonly/code_oracle.rs +++ b/core/lib/multivm/src/versions/testonly/code_oracle.rs @@ -1,8 +1,9 @@ use ethabi::Token; use zksync_types::{ - get_known_code_key, web3::keccak256, Address, Execute, StorageLogWithPreviousValue, U256, + get_known_code_key, h256_to_u256, u256_to_h256, web3::keccak256, Address, Execute, + StorageLogWithPreviousValue, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use super::{ get_empty_storage, load_precompiles_contract, read_precompiles_contract, read_test_contract, diff --git a/core/lib/multivm/src/versions/testonly/default_aa.rs b/core/lib/multivm/src/versions/testonly/default_aa.rs index c69c00de4508..a05d42d3918f 100644 --- 
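A hedged usage sketch of the byte/word helpers added to `core/lib/multivm/src/utils/bytecode.rs` above (they are `pub(crate)`, so this assumes a caller inside the same crate): `bytes_to_be_words` and `be_words_to_bytes` are inverses for 32-byte-aligned input, and `be_bytes_to_safe_address` only accepts values whose bytes above the low 20 are zero.

use zksync_types::{Address, U256};

fn bytecode_helpers_demo() {
    let bytecode = vec![0xAB_u8; 64]; // any length divisible by 32
    let words: Vec<U256> = bytes_to_be_words(&bytecode);
    assert_eq!(words.len(), 2);
    // Round trip: words back to the original byte string.
    assert_eq!(be_words_to_bytes(&words), bytecode);

    // A 32-byte big-endian value with zero upper bytes maps to an address...
    let addr = Address::repeat_byte(0x11);
    let mut padded = [0u8; 32];
    padded[12..].copy_from_slice(addr.as_bytes());
    assert_eq!(be_bytes_to_safe_address(&padded), Some(addr));

    // ...while any non-zero byte outside the low 20 bytes is rejected.
    padded[0] = 1;
    assert_eq!(be_bytes_to_safe_address(&padded), None);
}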
a/core/lib/multivm/src/versions/testonly/default_aa.rs +++ b/core/lib/multivm/src/versions/testonly/default_aa.rs @@ -1,11 +1,10 @@ use zksync_test_account::{DeployContractsTx, TxType}; use zksync_types::{ - get_code_key, get_known_code_key, get_nonce_key, + get_code_key, get_known_code_key, get_nonce_key, h256_to_u256, system_contracts::{DEPLOYMENT_NONCE_INCREMENT, TX_NONCE_INCREMENT}, utils::storage_key_for_eth_balance, U256, }; -use zksync_utils::h256_to_u256; use super::{default_pubdata_builder, read_test_contract, tester::VmTesterBuilder, TestedVm}; use crate::{ diff --git a/core/lib/multivm/src/versions/testonly/evm_emulator.rs b/core/lib/multivm/src/versions/testonly/evm_emulator.rs index a77274ec581c..1e7e136e4b99 100644 --- a/core/lib/multivm/src/versions/testonly/evm_emulator.rs +++ b/core/lib/multivm/src/versions/testonly/evm_emulator.rs @@ -9,14 +9,11 @@ use zksync_system_constants::{ }; use zksync_test_account::TxType; use zksync_types::{ - get_code_key, get_known_code_key, + get_code_key, get_known_code_key, h256_to_u256, utils::{key_for_eth_balance, storage_key_for_eth_balance}, AccountTreeId, Address, Execute, StorageKey, H256, U256, }; -use zksync_utils::{ - bytecode::{hash_bytecode, hash_evm_bytecode}, - bytes_to_be_words, h256_to_u256, -}; +use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode}; use super::{default_system_env, TestedVm, VmTester, VmTesterBuilder}; use crate::interface::{ @@ -97,7 +94,7 @@ impl EvmTestBuilder { system_env.base_system_smart_contracts.evm_emulator = Some(SystemContractCode { hash: hash_bytecode(&mock_emulator), - code: bytes_to_be_words(mock_emulator), + code: mock_emulator, }); } else { let emulator_hash = hash_bytecode(&mock_emulator); diff --git a/core/lib/multivm/src/versions/testonly/get_used_contracts.rs b/core/lib/multivm/src/versions/testonly/get_used_contracts.rs index 9d0908807e21..fa6470b44c47 100644 --- a/core/lib/multivm/src/versions/testonly/get_used_contracts.rs +++ b/core/lib/multivm/src/versions/testonly/get_used_contracts.rs @@ -5,8 +5,8 @@ use ethabi::Token; use zk_evm_1_3_1::zkevm_opcode_defs::decoding::{EncodingModeProduction, VmEncodingMode}; use zksync_system_constants::CONTRACT_DEPLOYER_ADDRESS; use zksync_test_account::{Account, TxType}; -use zksync_types::{AccountTreeId, Address, Execute, StorageKey, H256, U256}; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{h256_to_u256, AccountTreeId, Address, Execute, StorageKey, H256, U256}; +use zksync_utils::bytecode::hash_bytecode; use super::{ read_proxy_counter_contract, read_test_contract, diff --git a/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs b/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs index 37a2bf2bec20..4a33f478b419 100644 --- a/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs +++ b/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs @@ -4,11 +4,10 @@ use zksync_contracts::l1_messenger_contract; use zksync_system_constants::{BOOTLOADER_ADDRESS, L1_MESSENGER_ADDRESS}; use zksync_test_account::TxType; use zksync_types::{ - get_code_key, get_known_code_key, + get_code_key, get_known_code_key, h256_to_u256, l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, - Address, Execute, ExecuteTransactionCommon, U256, + u256_to_h256, Address, Execute, ExecuteTransactionCommon, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use super::{ read_test_contract, tester::VmTesterBuilder, ContractToDeploy, TestedVm, BASE_SYSTEM_CONTRACTS, diff --git a/core/lib/multivm/src/versions/testonly/l2_blocks.rs 
b/core/lib/multivm/src/versions/testonly/l2_blocks.rs index 947d8b5859f8..0dfe600b73be 100644 --- a/core/lib/multivm/src/versions/testonly/l2_blocks.rs +++ b/core/lib/multivm/src/versions/testonly/l2_blocks.rs @@ -7,12 +7,12 @@ use assert_matches::assert_matches; use zksync_system_constants::REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE; use zksync_types::{ block::{pack_block_info, L2BlockHasher}, - AccountTreeId, Address, Execute, ExecuteTransactionCommon, L1BatchNumber, L1TxCommonData, - L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, H256, SYSTEM_CONTEXT_ADDRESS, - SYSTEM_CONTEXT_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, - SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, U256, + h256_to_u256, u256_to_h256, AccountTreeId, Address, Execute, ExecuteTransactionCommon, + L1BatchNumber, L1TxCommonData, L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, H256, + SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_BLOCK_INFO_POSITION, + SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, + U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use super::{default_l1_batch, get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::{ diff --git a/core/lib/multivm/src/versions/testonly/mod.rs b/core/lib/multivm/src/versions/testonly/mod.rs index 309c0edff583..3377a49064f8 100644 --- a/core/lib/multivm/src/versions/testonly/mod.rs +++ b/core/lib/multivm/src/versions/testonly/mod.rs @@ -18,11 +18,11 @@ use zksync_contracts::{ SystemContractCode, }; use zksync_types::{ - block::L2BlockHasher, fee_model::BatchFeeInput, get_code_key, get_is_account_key, - utils::storage_key_for_eth_balance, Address, L1BatchNumber, L2BlockNumber, L2ChainId, - ProtocolVersionId, U256, + block::L2BlockHasher, fee_model::BatchFeeInput, get_code_key, get_is_account_key, h256_to_u256, + u256_to_h256, utils::storage_key_for_eth_balance, Address, L1BatchNumber, L2BlockNumber, + L2ChainId, ProtocolVersionId, U256, }; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{ pubdata::PubdataBuilder, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, }; @@ -133,7 +133,7 @@ pub(crate) fn get_bootloader(test: &str) -> SystemContractCode { let bootloader_code = read_bootloader_code(test); let bootloader_hash = hash_bytecode(&bootloader_code); SystemContractCode { - code: bytes_to_be_words(bootloader_code), + code: bootloader_code, hash: bootloader_hash, } } diff --git a/core/lib/multivm/src/versions/testonly/secp256r1.rs b/core/lib/multivm/src/versions/testonly/secp256r1.rs index 37d428f82101..8a6077ab522f 100644 --- a/core/lib/multivm/src/versions/testonly/secp256r1.rs +++ b/core/lib/multivm/src/versions/testonly/secp256r1.rs @@ -1,7 +1,6 @@ use zk_evm_1_5_0::zkevm_opcode_defs::p256; use zksync_system_constants::P256VERIFY_PRECOMPILE_ADDRESS; -use zksync_types::{web3::keccak256, Execute, H256, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, web3::keccak256, Execute, H256, U256}; use super::{tester::VmTesterBuilder, TestedVm}; use crate::interface::{ExecutionResult, InspectExecutionMode, TxExecutionMode, VmInterfaceExt}; diff --git a/core/lib/multivm/src/versions/testonly/transfer.rs b/core/lib/multivm/src/versions/testonly/transfer.rs index 3572adba147c..86588cc5f681 100644 --- a/core/lib/multivm/src/versions/testonly/transfer.rs +++ b/core/lib/multivm/src/versions/testonly/transfer.rs @@ -1,7 +1,6 @@ use ethabi::Token; use 
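After this series, `SystemContractCode::code` is assumed to hold raw bytes rather than pre-split U256 words (see the `get_bootloader` and mock-emulator changes above), with callers converting at the point of use. A hedged construction sketch; `bytes_to_be_words` refers to the crate-local helper introduced earlier:

use zksync_contracts::SystemContractCode;
use zksync_utils::bytecode::hash_bytecode;

fn system_contract_from_bytes(code: Vec<u8>) -> SystemContractCode {
    SystemContractCode {
        hash: hash_bytecode(&code),
        // Raw bytes; convert with `bytes_to_be_words(&code)` only where the VM
        // needs word-aligned memory (e.g. when populating the decommitter).
        code,
    }
}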
zksync_contracts::{load_contract, read_bytecode}; -use zksync_types::{utils::storage_key_for_eth_balance, Address, Execute, U256}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, utils::storage_key_for_eth_balance, Address, Execute, U256}; use super::{ default_pubdata_builder, get_empty_storage, tester::VmTesterBuilder, ContractToDeploy, TestedVm, diff --git a/core/lib/multivm/src/versions/testonly/upgrade.rs b/core/lib/multivm/src/versions/testonly/upgrade.rs index 359f19faedb2..3fb6257f7070 100644 --- a/core/lib/multivm/src/versions/testonly/upgrade.rs +++ b/core/lib/multivm/src/versions/testonly/upgrade.rs @@ -2,13 +2,13 @@ use zksync_contracts::{deployer_contract, load_sys_contract, read_bytecode}; use zksync_test_account::TxType; use zksync_types::{ ethabi::{Contract, Token}, - get_code_key, get_known_code_key, + get_code_key, get_known_code_key, h256_to_u256, protocol_upgrade::ProtocolUpgradeTxCommonData, - Address, Execute, ExecuteTransactionCommon, Transaction, COMPLEX_UPGRADER_ADDRESS, - CONTRACT_DEPLOYER_ADDRESS, CONTRACT_FORCE_DEPLOYER_ADDRESS, H256, + u256_to_h256, Address, Execute, ExecuteTransactionCommon, Transaction, + COMPLEX_UPGRADER_ADDRESS, CONTRACT_DEPLOYER_ADDRESS, CONTRACT_FORCE_DEPLOYER_ADDRESS, H256, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use super::{ get_complex_upgrade_abi, get_empty_storage, read_complex_upgrade, read_test_contract, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/events.rs b/core/lib/multivm/src/versions/vm_1_3_2/events.rs index 7b1f03c8ac99..0e62312185a2 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/events.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec) -> Vec { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs index 2912fad2841d..bfd33b4b355e 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs @@ -1,8 +1,7 @@ use std::{collections::HashMap, fmt::Debug, hash::Hash}; use zk_evm_1_3_3::{aux_structures::Timestamp, vm_state::PrimitiveValue, zkevm_opcode_defs}; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs index 
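For the event-merging change above: topic[0] of an event-writer event carries the emitting contract's address, and the remaining topics are the real topics. A hedged sketch of that split, assuming topics are 32-byte arrays as in `SolidityLikeEvent`:

use zksync_types::{h256_to_address, Address, H256};

fn split_event_writer_topics(topics: Vec<[u8; 32]>) -> (Address, Vec<[u8; 32]>) {
    // First topic is the emitting address (panics if no topics are present).
    let address = h256_to_address(&H256(topics[0]));
    // The rest are the event's actual topics.
    let real_topics = topics.into_iter().skip(1).collect();
    (address, real_topics)
}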
e9a85f8ba4b1..07fcdb0f522f 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs @@ -6,12 +6,13 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{StoragePtr, WriteStorage}, + utils::bytecode::bytes_to_be_words, vm_1_3_2::history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, }; @@ -59,7 +60,7 @@ impl DecommitterOracle .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs index ac4cc3df1706..e3614cbd471c 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs index 86ed02365a94..ef2d4f0b5769 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_3_2::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs index f52b6b8940db..fbb6795d89a3 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs @@ -11,13 +11,14 @@ use zksync_system_constants::{ KECCAK256_PRECOMPILE_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; -use zksync_types::{get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, H256, U256, }; use crate::{ interface::storage::{StoragePtr, WriteStorage}, + utils::bytecode::be_bytes_to_safe_address, vm_1_3_2::{ errors::VmRevertReasonParsingResult, history_recorder::HistoryMode, @@ -242,7 +243,7 @@ impl ValidationTracer { // The user is allowed to touch its own 
slots or slots semantically related to him. let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -309,7 +310,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -376,7 +377,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs b/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs index 163992516d27..b0d70c3522c4 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_3::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::{ interface::storage::WriteStorage, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs index 34c70e0f9c45..42106bbdfa22 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs @@ -12,13 +12,13 @@ use itertools::Itertools; use zk_evm_1_3_3::{aux_structures::Timestamp, vm_state::VmLocalState}; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; +use zksync_utils::bytecode::hash_bytecode; use crate::interface::storage::WriteStorage; /// The tests here help us with the testing the VM @@ -174,7 +174,7 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } pub fn verify_required_storage( diff --git a/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs index 0285320daa30..e63b6ec5a87d 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs @@ -1,19 +1,22 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_types::{ + address_to_h256, ceil_div_u256, ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l1::is_l1_tx_type, l2::TransactionType, ExecuteTransactionCommon, Transaction, MAX_L2_TX_GAS_LIMIT, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; +use zksync_utils::bytecode::hash_bytecode; use super::vm_with_bootloader::MAX_TXS_IN_BLOCK; -use crate::vm_1_3_2::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, - MAX_GAS_PER_PUBDATA_BYTE, +use 
crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_3_2::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, + MAX_GAS_PER_PUBDATA_BYTE, + }, }; // This structure represents the data that is used by @@ -197,10 +200,7 @@ impl TransactionData { } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_3_2/utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/utils.rs index 7870b1ff7443..5c72ba204d89 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_3::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ interface::storage::WriteStorage, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/vm.rs b/core/lib/multivm/src/versions/vm_1_3_2/vm.rs index d9768652c2f3..45b8a09c5a34 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/vm.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/vm.rs @@ -1,7 +1,7 @@ use std::{collections::HashSet, rc::Rc}; -use zksync_types::Transaction; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{h256_to_u256, Transaction}; +use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs index fd4d483fba5e..eee1baa59d60 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs @@ -15,16 +15,14 @@ use zk_evm_1_3_3::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, l1::is_l1_tx_type, Address, Transaction, - BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, l1::is_l1_tx_type, + Address, Transaction, BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, }; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{storage::WriteStorage, CompressedBytecodeInfo, L1BatchEnv}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_1_3_2::{ bootloader_state::BootloaderState, history_recorder::HistoryMode, @@ -84,8 +82,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -102,7 +103,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while 
compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -391,7 +392,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -399,7 +400,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -645,7 +646,7 @@ pub(crate) fn get_bootloader_memory_for_encoded_tx( .flat_map(bytecode::encode_call) .collect(); - let memory_addition = bytes_to_be_words(memory_addition); + let memory_addition = bytes_to_be_words(&memory_addition); memory.extend( (compressed_bytecodes_offset..compressed_bytecodes_offset + memory_addition.len()) @@ -727,12 +728,11 @@ fn formal_calldata_abi() -> PrimitiveValue { } } +// FIXME: &[u8] pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs index d3c428ab282b..a5157e323408 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs index 1acf75b27e1b..33b15e68005b 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs index 5f24f2465a32..c8adcf116d74 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs @@ -1,6 +1,6 @@ use itertools::Itertools; use zksync_types::U256; -use 
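On the `ceil_div` to `div_ceil` migration above: the removed `zksync_utils` helper appears to have short-circuited a zero numerator without touching the divisor, which is why the explicit `== 0` guards are kept. A small equivalence check under that assumption (`ceil_div_old` is a reconstruction for illustration, not the removed code verbatim):

fn ceil_div_old(a: u64, b: u64) -> u64 {
    // Assumed old behaviour: zero numerator returns zero without dividing.
    if a == 0 {
        0
    } else {
        (a + b - 1) / b
    }
}

fn check_div_ceil_migration() {
    for (a, b) in [(0_u64, 5_u64), (1, 5), (5, 5), (6, 5), (10_001, 17)] {
        let migrated = if a == 0 { 0 } else { a.div_ceil(b) };
        assert_eq!(migrated, ceil_div_old(a, b));
    }
}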
zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytecode::bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs index ffa4b4d50b8e..bc5befe3810c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs index c9d899742202..bfd7b9130f50 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs index 636a4058a037..e4df1e4c7fcd 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs @@ -6,16 +6,18 @@ use zk_evm_1_4_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_1_4_1::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, }; + /// The main job of the DecommiterOracle is to implement the DecommittmentProcessor trait - that is /// used by the VM to 'load' bytecodes into memory. 
#[derive(Debug)] @@ -60,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs index 3debfd1ca627..921e9b81f71f 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs index 6f927c5c99a8..a51c5ce46197 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_1::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_1::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_1_4_1::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -100,15 +105,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs index 2586d8d7f873..ab883d9bcbd2 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs @@ -8,8 +8,10 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::L1_MESSAGE_PUBDATA_BYTES, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, H256, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, H256, U256, +}; +use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{ diff --git 
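The pubdata tracer now recovers published bytecodes via `be_words_to_bytes` instead of a manual `flat_map(u256_to_bytes_be)`. A hedged sketch of the equivalence being relied on, again assuming access to the crate-local helper:

use zksync_types::U256;

fn words_to_bytes_equivalence(words: &[U256]) -> bool {
    // The old inline expansion: each word serialized to 32 big-endian bytes.
    let manual: Vec<u8> = words
        .iter()
        .flat_map(|word| {
            let mut bytes = [0u8; 32];
            word.to_big_endian(&mut bytes);
            bytes
        })
        .collect();
    manual == be_words_to_bytes(words)
}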
a/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs index 7b24e482b72d..536ea79e22f9 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_4_1::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs index c1ca93152a03..f938696297b5 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs index f7384da76d0d..872ab3d05dd5 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_1_4_1::{ - constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, - utils::overhead::derive_overhead, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_4_1::{ + constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs index b91733c7ca14..9c3ecd9741a3 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_1::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_1_4_1::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ 
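For the `into_tokens` change above: ABI encoding always produces a multiple of 32 bytes, so the explicit length assertion can move into `bytes_to_be_words`. A hedged, standalone sketch of the data shape (without the crate-local helper):

use zksync_types::{
    ethabi::{encode, Token},
    U256,
};

fn encoded_words(tokens: &[Token]) -> Vec<U256> {
    // ABI encoding pads every slot to 32 bytes, so the split cannot panic.
    let bytes = encode(tokens);
    debug_assert_eq!(bytes.len() % 32, 0);
    bytes.chunks(32).map(U256::from_big_endian).collect()
}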
pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs index ca2f0688154b..31807cb66cc1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_1_4_1::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs b/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs index b5d4cc971b9e..7f214b457317 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_1_4_1::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs index d151e3078b4a..a6376852fb28 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash 
= concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs index 182f6eff4414..8b367c5c5cae 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs index 1033fff90e46..2a6ef1d1ab4d 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs @@ -1,13 +1,13 @@ use itertools::Itertools; use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_1_4_2::Vm, HistoryMode, }; @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs index ffa4b4d50b8e..bc5befe3810c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs index d8d32a2b6c50..9e562de59866 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs +++ 
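`keccak256_concat` (now taken from `zksync_types::web3`) replaces the old `concat_and_hash` helper in the rolling-hash updates above; both are understood to hash the 64-byte concatenation of the previous rolling hash and the new transaction hash. A hedged sketch of that computation, assuming a `(H256, H256) -> H256` shape:

use zksync_types::{web3::keccak256, H256};

fn rolling_hash_update(prev_rolling_hash: H256, tx_hash: H256) -> H256 {
    // Concatenate the two 32-byte hashes and keccak the 64-byte buffer.
    let mut buf = [0u8; 64];
    buf[..32].copy_from_slice(prev_rolling_hash.as_bytes());
    buf[32..].copy_from_slice(tx_hash.as_bytes());
    H256(keccak256(&buf))
}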
b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs index 706e70d4b116..3d0e9bda8030 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs @@ -6,16 +6,18 @@ use zk_evm_1_4_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_1_4_2::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, }; + /// The main job of the DecommiterOracle is to implement the DecommittmentProcessor trait - that is /// used by the VM to 'load' bytecodes into memory. #[derive(Debug)] @@ -60,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs index e8d387621907..170bed0eed5d 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs index 6c4f737f9e94..58318f5d845e 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_1::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_1::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + 
extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_1_4_2::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -120,15 +125,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs index 0da5736bf955..8cfcd8c327e4 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs @@ -8,8 +8,10 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::L1_MESSAGE_PUBDATA_BYTES, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, H256, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, H256, U256, +}; +use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{ diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs index 5832241d262d..2caf7b060563 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_4_2::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs index c1ca93152a03..f938696297b5 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs index 38280aa80513..693690e3b42e 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_1_4_2::{ - constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, - 
utils::overhead::derive_overhead, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_4_2::{ + constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs index 87630a1ff372..52a0dc61d740 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_1::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_1_4_2::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs index b3a54c410f4d..d2233a515eab 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_1_4_2::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs b/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs index 11f8b6b6c427..b01b18716836 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_1_4_2::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs index 47bbbb5bae64..501207e52bd9 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs index c97d3ff30e49..6605bea1f6b5 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs index 2d6f081a1886..38d5b40af7e4 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs @@ -1,13 +1,13 @@ use itertools::Itertools; use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + 
utils::{bytecode, bytecode::bytes_to_be_words}, vm_boojum_integration::Vm, HistoryMode, }; @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs index 1e95d0bc8f35..48db28747bef 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_0::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs index 704a774893d3..19da0ffda77c 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_0::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs index eb7db7097920..b61560afe39d 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs @@ -6,12 +6,13 @@ use zk_evm_1_4_0::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_boojum_integration::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); 
+ let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs b/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs index acdfbaaa42e0..b5fc1c5b92f8 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_0::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ interface::storage::{StoragePtr, WriteStorage}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs index 2f7d141cb0a7..6396d143b401 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_0::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_boojum_integration::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -98,15 +103,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs index ffbb1d80a80e..0d944724afd2 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs @@ -7,8 +7,8 @@ use zk_evm_1_4_0::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, U256}; +use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{ diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs 
b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs index aafdab9ee428..e916d6e0e66c 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_boojum_integration::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs index 152ccad2fbcb..cb400ab5fa7d 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs index 8bf575effe06..774d2061beb2 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_boojum_integration::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, OverheadCoefficients}, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_boojum_integration::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs index 5b6b9b2eca17..dc41926c4485 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_0::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ 
storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_boojum_integration::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs index 386dc040099b..91082e98f9d1 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_boojum_integration::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs index 8e785775697a..6fa1a38828e0 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -30,7 +32,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs index 02fe0b8b3000..c6d299075f2a 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_4_0::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_boojum_integration::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs index adb406eec789..4f05ef30a46d 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use super::{snapshot::L2BlockSnapshot, tx::BootloaderTx}; use crate::{ @@ -51,7 +50,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs index 770f232019bf..838c2d6ba60f 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::{l2_block::BootloaderL2Block, tx::BootloaderTx}; use crate::{ @@ -22,8 +21,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_fast/bytecode.rs b/core/lib/multivm/src/versions/vm_fast/bytecode.rs 
index b75e33a21b05..abbd4461c25e 100644 --- a/core/lib/multivm/src/versions/vm_fast/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_fast/bytecode.rs @@ -1,6 +1,6 @@ use itertools::Itertools; -use zksync_types::H256; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{h256_to_u256, H256}; +use zksync_utils::bytecode::hash_bytecode; use super::Vm; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_fast/events.rs b/core/lib/multivm/src/versions/vm_fast/events.rs index 294e8adce32b..4fb26d306897 100644 --- a/core/lib/multivm/src/versions/vm_fast/events.rs +++ b/core/lib/multivm/src/versions/vm_fast/events.rs @@ -1,5 +1,4 @@ -use zksync_types::{L1BatchNumber, H256}; -use zksync_utils::h256_to_account_address; +use zksync_types::{h256_to_address, L1BatchNumber, H256}; use zksync_vm2::interface::Event; use crate::interface::VmEvent; @@ -16,7 +15,7 @@ impl EventAccumulator { fn into_vm_event(self, block_number: L1BatchNumber) -> VmEvent { VmEvent { location: (block_number, self.tx_number_in_block as u32), - address: h256_to_account_address(&H256(self.topics[0])), + address: h256_to_address(&H256(self.topics[0])), indexed_topics: self.topics[1..].iter().map(H256::from).collect(), value: self.data, } diff --git a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs index 62aba8df5b9b..fb619fa3e5ff 100644 --- a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs @@ -3,8 +3,8 @@ use std::{cell::RefCell, collections::HashMap, rc::Rc}; use zksync_system_constants::{CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_evm_bytecode, h256_to_u256}; +use zksync_types::{h256_to_u256, U256}; +use zksync_utils::bytecode::hash_evm_bytecode; use zksync_vm2::interface::{ CallframeInterface, CallingMode, GlobalStateInterface, Opcode, OpcodeType, ShouldStop, Tracer, }; diff --git a/core/lib/multivm/src/versions/vm_fast/glue.rs b/core/lib/multivm/src/versions/vm_fast/glue.rs index c2d38f351c04..f1a43d557358 100644 --- a/core/lib/multivm/src/versions/vm_fast/glue.rs +++ b/core/lib/multivm/src/versions/vm_fast/glue.rs @@ -1,5 +1,7 @@ -use zksync_types::l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log}, + u256_to_h256, +}; use zksync_vm2::interface; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs b/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs index b3bf15cb1be5..89b22d328ac5 100644 --- a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs +++ b/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_fast/pubdata.rs b/core/lib/multivm/src/versions/vm_fast/pubdata.rs index c1ca93152a03..f938696297b5 100644 --- a/core/lib/multivm/src/versions/vm_fast/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_fast/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use 
zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_fast/refund.rs b/core/lib/multivm/src/versions/vm_fast/refund.rs index 05648acddcfe..13637ff97122 100644 --- a/core/lib/multivm/src/versions/vm_fast/refund.rs +++ b/core/lib/multivm/src/versions/vm_fast/refund.rs @@ -1,5 +1,4 @@ -use zksync_types::{H256, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, H256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs index 27192f46d8dd..e00a71a43c36 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs @@ -1,7 +1,8 @@ use std::{any::Any, collections::HashSet, fmt, rc::Rc}; -use zksync_types::{writes::StateDiffRecord, StorageKey, Transaction, H160, H256, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{ + h256_to_u256, writes::StateDiffRecord, StorageKey, Transaction, H160, H256, U256, +}; use zksync_vm2::interface::{Event, HeapId, StateInterface}; use zksync_vm_interface::{ pubdata::PubdataBuilder, storage::ReadStorage, CurrentExecutionState, L2BlockEnv, diff --git a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs b/core/lib/multivm/src/versions/vm_fast/transaction_data.rs index 2ec86eb3ceaf..afc0ef51e7d4 100644 --- a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_fast/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_latest::{ - constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, - utils::overhead::derive_overhead, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_latest::{ + constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index d18f7b91f323..2aab2bfc7d96 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -5,8 +5,10 @@ use zk_evm_1_5_0::{ }; use zksync_contracts::SystemContractCode; use zksync_types::{ + h256_to_u256, l1::is_l1_tx_type, l2_to_l1_log::UserL2ToL1Log, + u256_to_h256, utils::key_for_eth_balance, writes::{ compression::compress_with_best_strategy, StateDiffRecord, BYTES_PER_DERIVED_KEY, @@ -16,7 +18,7 @@ use zksync_types::{ Transaction, BOOTLOADER_ADDRESS, H160, H256, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use zksync_vm2::{ 
interface::{CallframeInterface, HeapId, StateInterface, Tracer}, ExecutionEnd, FatPointer, Program, Settings, StorageSlot, VirtualMachine, @@ -845,7 +847,7 @@ impl World { ) -> (U256, Program) { ( h256_to_u256(code.hash), - Program::from_words(code.code.clone(), is_bootloader), + Program::new(&code.code, is_bootloader), ) } diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs index 103c5d16540e..95502b8dc60c 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs index c409bda35c1d..58dc20346a6f 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, ProtocolVersionId, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, ProtocolVersionId, U256}; use super::tx::BootloaderTx; use crate::{ @@ -27,8 +26,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs index 2cd98c8e58a3..d0390444e1cb 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs @@ -1,13 +1,13 @@ use itertools::Itertools; use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_latest::Vm, HistoryMode, }; @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs index fd6f393155d7..bded254c7fcc 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_5_0::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use 
zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs index e7277f38289d..9dac6480dc57 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_5_0::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs index 507e3d8c7598..1afa9b483ec5 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs @@ -10,12 +10,12 @@ use zk_evm_1_5_0::{ }, zkevm_opcode_defs::{VersionedHashHeader, VersionedHashNormalizedPreimage}, }; -use zksync_types::{H256, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, H256, U256}; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_latest::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -69,7 +69,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .unwrap_or_else(|| panic!("Trying to decommit unexisting hash: {}", hash)); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs index 9c7b68c1ad51..242cdc6a2239 100644 --- a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs @@ -10,6 +10,7 @@ use zk_evm_1_5_0::{ }, }; use zksync_types::{ + h256_to_u256, u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -17,7 +18,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::{ glue::GlueInto, @@ -620,8 +620,7 @@ fn get_pubdata_price_bytes(initial_value: U256, final_value: U256, is_initial: b #[cfg(test)] mod tests { - use zksync_types::H256; - use zksync_utils::h256_to_u256; + use zksync_types::{h256_to_u256, H256}; use super::*; use 
crate::interface::storage::{InMemoryStorage, StorageView}; diff --git a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs index 96d59f208b03..51c9dde0dd56 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs @@ -8,8 +8,10 @@ use zk_evm_1_5_0::{ vm_state::VmLocalState, zkevm_opcode_defs::{ContractCodeSha256Format, VersionedHashLen32}, }; -use zksync_types::{writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, U256}; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; +use zksync_types::{ + h256_to_u256, writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, U256, +}; +use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::pubdata::PubdataBuilder; use super::{HistoryEnabled, Vm}; @@ -18,6 +20,7 @@ use crate::{ storage::{InMemoryStorage, ReadStorage, StorageView, WriteStorage}, CurrentExecutionState, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, }, + utils::bytecode::bytes_to_be_words, versions::testonly::{filter_out_base_system_contracts, TestedVm}, vm_latest::{ constants::BOOTLOADER_HEAP_PAGE, @@ -111,7 +114,7 @@ impl TestedVm for TestedLatestVm { .iter() .map(|&bytecode| { let hash = hash_bytecode(bytecode); - let words = bytes_to_be_words(bytecode.to_vec()); + let words = bytes_to_be_words(bytecode); (h256_to_u256(hash), words) }) .collect(); diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs index 61c8ef0b5abf..98ae14ff7f89 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs @@ -7,14 +7,17 @@ use zk_evm_1_5_0::{ FarCallOpcode, FatPointer, Opcode, CALL_IMPLICIT_CALLDATA_FAT_PTR_REGISTER, }, }; -use zksync_types::{CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_utils::{bytecode::hash_evm_bytecode, bytes_to_be_words, h256_to_u256}; -use zksync_vm_interface::storage::StoragePtr; +use zksync_types::{h256_to_u256, CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS}; +use zksync_utils::bytecode::hash_evm_bytecode; use super::{traits::VmTracer, utils::read_pointer}; use crate::{ - interface::{storage::WriteStorage, tracer::TracerExecutionStatus}, + interface::{ + storage::{StoragePtr, WriteStorage}, + tracer::TracerExecutionStatus, + }, tracers::dynamic::vm_1_5_0::DynTracer, + utils::bytecode::bytes_to_be_words, vm_latest::{BootloaderState, HistoryMode, SimpleMemory, ZkSyncVmState}, }; @@ -92,7 +95,7 @@ impl VmTracer for EvmDeployTracer { let timestamp = Timestamp(state.local_state.timestamp); for published_bytecode in mem::take(&mut self.pending_bytecodes) { let hash = h256_to_u256(hash_evm_bytecode(&published_bytecode)); - let as_words = bytes_to_be_words(published_bytecode); + let as_words = bytes_to_be_words(&published_bytecode); state .decommittment_processor .insert_dynamic_bytecode(hash, as_words, timestamp); diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs index 998e8a13ad25..4c71c3b2fc49 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_5_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use 
zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use zksync_vm_interface::pubdata::PubdataBuilder; use crate::{ @@ -17,9 +19,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_5_0::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_latest::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -132,15 +137,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs index 78826a16313d..f3fc1b167b45 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs @@ -5,8 +5,7 @@ use zk_evm_1_5_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{H256, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, H256, U256}; use crate::{ interface::{ diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs index 0a11f5d3f849..50901dca62fc 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, SECP256R1_VERIFY_PRECOMPILE_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_latest::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs index 90948f2f89fd..544934665adf 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_latest::{ - constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, - utils::overhead::derive_overhead, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_latest::{ + constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, + 
utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -209,10 +214,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs index 90bb0c610e2c..03f306f36c52 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_5_0::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_latest::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -88,25 +88,20 @@ pub(crate) fn new_vm_state( DecommitterOracle::new(storage); let mut initial_bytecodes = vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )]; if let Some(evm_emulator) = &system_env.base_system_smart_contracts.evm_emulator { - initial_bytecodes.push((h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())); + initial_bytecodes.push(( + h256_to_u256(evm_emulator.hash), + bytes_to_be_words(&evm_emulator.code), + )); } decommittment_processor.populate(initial_bytecodes, Timestamp(0)); memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs index b3bf15cb1be5..89b22d328ac5 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_latest/utils/fee.rs b/core/lib/multivm/src/versions/vm_latest/utils/fee.rs index 666fcca87e12..58b457dce68a 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs index 59d3eb0ef0fc..840f1687ccfa 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index ff90eb14ee42..5a0e77023a5e 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -2,11 +2,12 @@ use std::{collections::HashMap, rc::Rc}; use circuit_sequencer_api_1_5_0::sort_storage_access::sort_storage_access_queries; use zksync_types::{ + h256_to_u256, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, + u256_to_h256, vm::VmVersion, Transaction, H256, }; -use zksync_utils::{be_words_to_bytes, h256_to_u256, u256_to_h256}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ @@ -18,7 +19,7 @@ use crate::{ VmExecutionResultAndLogs, VmFactory, VmInterface, VmInterfaceHistoryEnabled, VmTrackingContracts, }, - utils::events::extract_l2tol1logs_from_l1_messenger, + utils::{bytecode::be_words_to_bytes, events::extract_l2tol1logs_from_l1_messenger}, vm_latest::{ bootloader_state::BootloaderState, old_vm::{events::merge_events, history_recorder::HistoryEnabled}, diff --git a/core/lib/multivm/src/versions/vm_m5/events.rs b/core/lib/multivm/src/versions/vm_m5/events.rs index a444ad37feb5..659b41cc2060 100644 --- a/core/lib/multivm/src/versions/vm_m5/events.rs +++ b/core/lib/multivm/src/versions/vm_m5/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec) -> Vec { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_m5/history_recorder.rs b/core/lib/multivm/src/versions/vm_m5/history_recorder.rs index f744be32d0bf..f7923e42b667 100644 --- 
a/core/lib/multivm/src/versions/vm_m5/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_m5/history_recorder.rs @@ -9,8 +9,7 @@ use zk_evm_1_3_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::vm_m5::storage::{Storage, StoragePtr}; diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs index bc43c72966ea..ca6fde506f87 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs @@ -6,13 +6,16 @@ use zk_evm_1_3_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; -use crate::vm_m5::{ - history_recorder::HistoryRecorder, - storage::{Storage, StoragePtr}, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m5::{ + history_recorder::HistoryRecorder, + storage::{Storage, StoragePtr}, + }, }; #[derive(Debug)] @@ -53,7 +56,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs index 7ccfdf2f30c7..ab373e9e7696 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs @@ -7,10 +7,9 @@ use zk_evm_1_3_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::vm_m5::{ diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs b/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs index 45f8ed88f834..ea92307d1224 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs @@ -16,22 +16,23 @@ use zk_evm_1_3_1::{ }, }; use zksync_types::{ - get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, - ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, CONTRACT_DEPLOYER_ADDRESS, H256, - KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, - L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, U256, -}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, + CONTRACT_DEPLOYER_ADDRESS, H256, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, + L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, + SYSTEM_CONTEXT_ADDRESS, U256, }; -use crate::vm_m5::{ - 
errors::VmRevertReasonParsingResult, - memory::SimpleMemory, - storage::{Storage, StoragePtr}, - utils::{aux_heap_page_from_base, heap_page_from_base}, - vm_instance::{get_vm_hook_params, VM_HOOK_POSITION}, - vm_with_bootloader::BOOTLOADER_HEAP_PAGE, +use crate::{ + utils::bytecode::be_bytes_to_safe_address, + vm_m5::{ + errors::VmRevertReasonParsingResult, + memory::SimpleMemory, + storage::{Storage, StoragePtr}, + utils::{aux_heap_page_from_base, heap_page_from_base}, + vm_instance::{get_vm_hook_params, VM_HOOK_POSITION}, + vm_with_bootloader::BOOTLOADER_HEAP_PAGE, + }, }; pub trait ExecutionEndTracer: Tracer { @@ -322,7 +323,7 @@ impl ValidationTracer { // The user is allowed to touch its own slots or slots semantically related to him. let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -383,7 +384,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -450,7 +451,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().get_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_m5/refunds.rs b/core/lib/multivm/src/versions/vm_m5/refunds.rs index fd4e2788f035..8b0d3e5d84c4 100644 --- a/core/lib/multivm/src/versions/vm_m5/refunds.rs +++ b/core/lib/multivm/src/versions/vm_m5/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_1::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::vm_m5::{ storage::Storage, diff --git a/core/lib/multivm/src/versions/vm_m5/test_utils.rs b/core/lib/multivm/src/versions/vm_m5/test_utils.rs index d7c0dfb9f6d0..ff6ed0392c85 100644 --- a/core/lib/multivm/src/versions/vm_m5/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_m5/test_utils.rs @@ -14,13 +14,13 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; +use zksync_utils::bytecode::hash_bytecode; use super::utils::StorageLogQuery; use crate::vm_m5::{ @@ -172,5 +172,5 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } diff --git a/core/lib/multivm/src/versions/vm_m5/transaction_data.rs b/core/lib/multivm/src/versions/vm_m5/transaction_data.rs index b64e3f770185..2307c5e24127 100644 --- a/core/lib/multivm/src/versions/vm_m5/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_m5/transaction_data.rs @@ -1,17 +1,20 @@ use zk_evm_1_3_1::zkevm_opcode_defs::system_params::{MAX_PUBDATA_PER_BLOCK, MAX_TX_ERGS_LIMIT}; use zksync_types::{ + address_to_h256, ceil_div_u256, 
ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l2::TransactionType, ExecuteTransactionCommon, Transaction, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; +use zksync_utils::bytecode::hash_bytecode; use super::vm_with_bootloader::MAX_GAS_PER_PUBDATA_BYTE; -use crate::vm_m5::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m5::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, + }, }; const L1_TX_TYPE: u8 = 255; @@ -171,10 +174,7 @@ impl TransactionData { } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_m5/utils.rs b/core/lib/multivm/src/versions/vm_m5/utils.rs index a38618395b1f..de8c746bfb80 100644 --- a/core/lib/multivm/src/versions/vm_m5/utils.rs +++ b/core/lib/multivm/src/versions/vm_m5/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_m5/vm.rs b/core/lib/multivm/src/versions/vm_m5/vm.rs index 55afeed17cd1..266a0a437e5e 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm.rs @@ -1,7 +1,6 @@ use std::rc::Rc; -use zksync_types::{vm::VmVersion, Transaction}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, vm::VmVersion, Transaction}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs index cd2979db5e57..653169cd7ff0 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs @@ -14,15 +14,14 @@ use zk_evm_1_3_1::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, Address, Transaction, BOOTLOADER_ADDRESS, - L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, Address, Transaction, + BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, }; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::L1BatchEnv, + utils::bytecode::bytes_to_be_words, vm_m5::{ bootloader_state::BootloaderState, oracles::OracleWithHistory, @@ -73,8 +72,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } 
pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -91,7 +93,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -347,7 +349,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -355,7 +357,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -585,9 +587,7 @@ fn formal_calldata_abi() -> PrimitiveValue { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_m6/events.rs b/core/lib/multivm/src/versions/vm_m6/events.rs index a444ad37feb5..659b41cc2060 100644 --- a/core/lib/multivm/src/versions/vm_m6/events.rs +++ b/core/lib/multivm/src/versions/vm_m6/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec) -> Vec { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_m6/history_recorder.rs b/core/lib/multivm/src/versions/vm_m6/history_recorder.rs index 63dc9be4933a..5f7a116c62ac 100644 --- a/core/lib/multivm/src/versions/vm_m6/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_m6/history_recorder.rs @@ -9,8 +9,7 @@ use zk_evm_1_3_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::vm_m6::storage::{Storage, StoragePtr}; diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs index fe59580e2ce9..a43ec4ec4fd8 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs @@ -6,13 +6,16 @@ use zk_evm_1_3_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, 
bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; -use crate::vm_m6::{ - history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, - storage::{Storage, StoragePtr}, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m6::{ + history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, + storage::{Storage, StoragePtr}, + }, }; /// The main job of the DecommiterOracle is to implement the DecommitmentProcessor trait - that is @@ -59,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs b/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs index 5393b9e48169..7a59754140c9 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::vm_m6::{ diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs index 4d963d08952d..9b94ec9de84f 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_m6::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs index f046ba5befe9..e6b040b93f5d 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs @@ -11,22 +11,25 @@ use zksync_system_constants::{ KECCAK256_PRECOMPILE_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; -use zksync_types::{get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, H256, U256, }; -use crate::vm_m6::{ - errors::VmRevertReasonParsingResult, - history_recorder::HistoryMode, - memory::SimpleMemory, - oracles::tracer::{ - utils::{ - computational_gas_price, get_calldata_page_via_abi, print_debug_if_needed, VmHook, +use crate::{ + utils::bytecode::be_bytes_to_safe_address, + vm_m6::{ + errors::VmRevertReasonParsingResult, + history_recorder::HistoryMode, + memory::SimpleMemory, + oracles::tracer::{ + utils::{ + 
computational_gas_price, get_calldata_page_via_abi, print_debug_if_needed, VmHook, + }, + ExecutionEndTracer, PendingRefundTracer, PubdataSpentTracer, StorageInvocationTracer, }, - ExecutionEndTracer, PendingRefundTracer, PubdataSpentTracer, StorageInvocationTracer, + storage::{Storage, StoragePtr}, }, - storage::{Storage, StoragePtr}, }; #[derive(Debug, Clone, Eq, PartialEq, Copy)] @@ -252,7 +255,7 @@ impl ValidationTracer { // The user is allowed to touch its own slots or slots semantically related to him. let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -319,7 +322,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -386,7 +389,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().get_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_m6/refunds.rs b/core/lib/multivm/src/versions/vm_m6/refunds.rs index 406bf380a0b2..f98c84409410 100644 --- a/core/lib/multivm/src/versions/vm_m6/refunds.rs +++ b/core/lib/multivm/src/versions/vm_m6/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_1::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::vm_m6::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_m6/test_utils.rs b/core/lib/multivm/src/versions/vm_m6/test_utils.rs index 4bd39bc56dd4..438a67129ac6 100644 --- a/core/lib/multivm/src/versions/vm_m6/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/test_utils.rs @@ -12,13 +12,13 @@ use itertools::Itertools; use zk_evm_1_3_1::{aux_structures::Timestamp, vm_state::VmLocalState}; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; +use zksync_utils::bytecode::hash_bytecode; use super::utils::StorageLogQuery; use crate::vm_m6::{ @@ -172,5 +172,5 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } diff --git a/core/lib/multivm/src/versions/vm_m6/transaction_data.rs b/core/lib/multivm/src/versions/vm_m6/transaction_data.rs index a8f80ea3255e..cfd2ebf00e44 100644 --- a/core/lib/multivm/src/versions/vm_m6/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_m6/transaction_data.rs @@ -1,18 +1,21 @@ use zk_evm_1_3_1::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_types::{ + address_to_h256, ceil_div_u256, ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l1::is_l1_tx_type, l2::TransactionType, ExecuteTransactionCommon, Transaction, MAX_L2_TX_GAS_LIMIT, U256, }; 
-use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; +use zksync_utils::bytecode::hash_bytecode; use super::vm_with_bootloader::{MAX_GAS_PER_PUBDATA_BYTE, MAX_TXS_IN_BLOCK}; -use crate::vm_m6::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m6::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, + }, }; pub(crate) const L1_TX_TYPE: u8 = 255; @@ -198,10 +201,7 @@ impl TransactionData { } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_m6/utils.rs b/core/lib/multivm/src/versions/vm_m6/utils.rs index 912a30a4eafc..a9304f5cd525 100644 --- a/core/lib/multivm/src/versions/vm_m6/utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_m6/vm.rs b/core/lib/multivm/src/versions/vm_m6/vm.rs index 4c67a2184180..0443dc8fb55e 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm.rs @@ -1,7 +1,7 @@ use std::{collections::HashSet, rc::Rc}; -use zksync_types::{vm::VmVersion, Transaction}; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{h256_to_u256, vm::VmVersion, Transaction}; +use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs index ae44e721b0d7..a47ffb116364 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs @@ -14,16 +14,14 @@ use zk_evm_1_3_1::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, Address, Transaction, BOOTLOADER_ADDRESS, - L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, Address, Transaction, + BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, }; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{CompressedBytecodeInfo, L1BatchEnv}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_m6::{ bootloader_state::BootloaderState, history_recorder::HistoryMode, @@ -84,8 +82,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 
0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -102,7 +103,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -396,7 +397,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -404,7 +405,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -821,7 +822,7 @@ pub(crate) fn get_bootloader_memory_for_encoded_tx( .flat_map(bytecode::encode_call) .collect(); - let memory_addition = bytes_to_be_words(memory_addition); + let memory_addition = bytes_to_be_words(&memory_addition); memory.extend( (compressed_bytecodes_offset..compressed_bytecodes_offset + memory_addition.len()) @@ -906,9 +907,7 @@ fn formal_calldata_abi() -> PrimitiveValue { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs index e8cabebc9f7c..3bc669105b05 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs index 14c895d7a0b4..a05dc1ae2430 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs @@ -1,10 +1,9 @@ -use zksync_types::U256; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ interface::{BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_refunds_enhancement::{ bootloader_state::l2_block::BootloaderL2Block, constants::{ @@ -23,8 +22,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + 
bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs index f7ab9ae8b517..766cac391e32 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs @@ -1,13 +1,13 @@ use itertools::Itertools; use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_refunds_enhancement::Vm, HistoryMode, }; @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs index 52a4ed8a2876..05ec6557e905 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs index 8af2c42db957..d25d2a57259d 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_3_3::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs index ccc8d9052b7e..fc9d0794b958 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs +++ 
b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs @@ -6,12 +6,13 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_refunds_enhancement::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs index a9c5b71e782e..73a5d610bc26 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs index 0dbf5a3cbf40..98fee074a940 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs @@ -5,8 +5,8 @@ use zk_evm_1_3_3::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, U256}; +use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{ diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs index 1d3e9a272764..d744261e4f48 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_refunds_enhancement::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs index 22ab09296c91..5bc13bfac2d0 100644 --- 
a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_refunds_enhancement::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, OverheadCoefficients}, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_refunds_enhancement::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs index 22f92891e40a..6776bc37c9d5 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_3_3::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_refunds_enhancement::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs index b449165be348..58419acbe60a 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_refunds_enhancement::utils::fee::get_batch_base_fee}; diff --git 
a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs index f7203b57b4c4..8bd06c7faa6b 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -30,7 +32,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs index af25c4b4d7c4..efcee968db40 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_refunds_enhancement::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs index 197ecbff5896..d100b17c7c08 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut 
self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs index 3e2474835fa4..4c33aeb6e147 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -23,8 +22,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs index d5f2b50b83fc..2b26d4fc9d6d 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs @@ -1,13 +1,13 @@ use itertools::Itertools; use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_virtual_blocks::Vm, HistoryMode, }; @@ -34,9 +34,7 @@ impl Vm { pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { let bytecode_hash = hash_bytecode(&bytecode); let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs index 52a4ed8a2876..05ec6557e905 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs 
index cbd4dc0ed738..111a337bf449 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_3_3::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs index 3c8d72b0b33a..fad51513dbca 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs @@ -6,12 +6,13 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; +use zksync_utils::bytecode::bytecode_len_in_words; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_virtual_blocks::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +62,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs index defbad70f1a9..0b3a590d8d18 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs index a2ca08a7ef96..b35dfecfa400 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs @@ -8,8 +8,10 @@ use zk_evm_1_3_3::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, StorageKey, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, StorageKey, U256, +}; +use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{ diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs index 
ef8219ec2b4d..6db2bac819df 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_virtual_blocks::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs index c96004163a65..a2540d12a670 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; - -use crate::vm_virtual_blocks::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, OverheadCoefficients}, +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_virtual_blocks::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -196,10 +201,7 @@ impl TransactionData { } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs index d26acc4e9301..d1509bd016d8 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_3_3::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_virtual_blocks::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + 
bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs index f86d8749c9ed..08fe00741189 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_virtual_blocks::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs index a53951a851e1..e9d46570983d 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -31,7 +33,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs index cba4700002bb..6c79c05bc5b2 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_virtual_blocks::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/state/Cargo.toml b/core/lib/state/Cargo.toml index dd56368f3d2e..ced06de1a8e8 100644 --- a/core/lib/state/Cargo.toml +++ b/core/lib/state/Cargo.toml @@ -14,7 +14,6 @@ categories.workspace = true vise.workspace = true zksync_dal.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true zksync_shared_metrics.workspace = true zksync_storage.workspace = true zksync_vm_interface.workspace = true diff --git a/core/lib/state/src/storage_factory/mod.rs b/core/lib/state/src/storage_factory/mod.rs index 0b514f8f9644..be7e20c5f83d 100644 --- a/core/lib/state/src/storage_factory/mod.rs +++ b/core/lib/state/src/storage_factory/mod.rs @@ -5,8 +5,7 @@ use async_trait::async_trait; use tokio::{runtime::Handle, sync::watch}; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_storage::RocksDB; -use zksync_types::{L1BatchNumber, StorageKey, StorageValue, H256}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, L1BatchNumber, StorageKey, StorageValue, H256}; use zksync_vm_interface::storage::{ReadStorage, StorageSnapshot}; use self::metrics::{SnapshotStage, SNAPSHOT_METRICS}; @@ -201,10 +200,7 @@ impl CommonStorage<'static> { let factory_deps = bytecodes .into_iter() - .map(|(hash_u256, words)| { - let bytes: Vec = words.into_iter().flatten().collect(); - (u256_to_h256(hash_u256), bytes) - }) + .map(|(hash_u256, bytes)| (u256_to_h256(hash_u256), bytes)) .collect(); let storage = previous_values.into_iter().map(|(key, prev_value)| { diff --git a/core/lib/tee_verifier/Cargo.toml b/core/lib/tee_verifier/Cargo.toml index 331c47e365eb..289803fb5a89 100644 --- a/core/lib/tee_verifier/Cargo.toml +++ b/core/lib/tee_verifier/Cargo.toml @@ -17,7 +17,6 @@ zksync_merkle_tree.workspace = true zksync_multivm.workspace = true zksync_prover_interface.workspace = true 
zksync_types.workspace = true -zksync_utils.workspace = true anyhow.workspace = true once_cell.workspace = true diff --git a/core/lib/tee_verifier/src/lib.rs b/core/lib/tee_verifier/src/lib.rs index 140085dbb9fe..8e8362b57f4b 100644 --- a/core/lib/tee_verifier/src/lib.rs +++ b/core/lib/tee_verifier/src/lib.rs @@ -23,10 +23,9 @@ use zksync_prover_interface::inputs::{ StorageLogMetadata, V1TeeVerifierInput, WitnessInputMerklePaths, }; use zksync_types::{ - block::L2BlockExecutionData, commitment::PubdataParams, L1BatchNumber, StorageLog, - StorageValue, Transaction, H256, + block::L2BlockExecutionData, commitment::PubdataParams, u256_to_h256, L1BatchNumber, + StorageLog, StorageValue, Transaction, H256, }; -use zksync_utils::u256_to_h256; /// A structure to hold the result of verification. pub struct VerificationResult { @@ -305,7 +304,6 @@ mod tests { use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_multivm::interface::{L1BatchEnv, SystemEnv, TxExecutionMode}; use zksync_prover_interface::inputs::{TeeVerifierInput, VMRunWitnessInputData}; - use zksync_types::U256; use super::*; @@ -345,11 +343,11 @@ mod tests { version: Default::default(), base_system_smart_contracts: BaseSystemContracts { bootloader: SystemContractCode { - code: vec![U256([1; 4])], + code: vec![1; 32], hash: H256([1; 32]), }, default_aa: SystemContractCode { - code: vec![U256([1; 4])], + code: vec![1; 32], hash: H256([1; 32]), }, evm_emulator: None, diff --git a/core/lib/types/src/abi.rs b/core/lib/types/src/abi.rs index 84f8aba64869..1ce709617ccf 100644 --- a/core/lib/types/src/abi.rs +++ b/core/lib/types/src/abi.rs @@ -1,9 +1,10 @@ use anyhow::Context as _; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ ethabi, ethabi::{ParamType, Token}, + h256_to_u256, transaction_request::TransactionRequest, web3, Address, H256, U256, }; diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs index 310e3a73b8e8..804da61b7295 100644 --- a/core/lib/types/src/block.rs +++ b/core/lib/types/src/block.rs @@ -4,13 +4,12 @@ use serde::{Deserialize, Serialize}; use zksync_basic_types::{commitment::PubdataParams, Address, Bloom, BloomInput, H256, U256}; use zksync_contracts::BaseSystemContractsHashes; use zksync_system_constants::SYSTEM_BLOCK_INFO_BLOCK_NUMBER_MULTIPLIER; -use zksync_utils::concat_and_hash; use crate::{ fee_model::BatchFeeInput, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, priority_op_onchain_data::PriorityOpOnchainData, - web3::keccak256, + web3::{keccak256, keccak256_concat}, AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, }; @@ -253,7 +252,7 @@ impl L2BlockHasher { /// Updates this hasher with a transaction hash. This should be called for all transactions in the block /// in the order of their execution. pub fn push_tx_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash); + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash); } /// Returns the hash of the L2 block. 
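The rolling-hash hunks above and below replace the removed `zksync_utils::concat_and_hash` with `web3::keccak256_concat`. A small sketch of the assumed semantics (Keccak-256 over the 64-byte concatenation of two 32-byte hashes), with the `sha3` crate standing in for the in-tree hasher and a plain `[u8; 32]` standing in for `H256`:

```rust
use sha3::{Digest, Keccak256};

type H256 = [u8; 32];

// Assumed behaviour of `keccak256_concat`, matching the removed
// `zksync_utils::concat_and_hash`: hash the concatenation of the two inputs.
fn keccak256_concat(lhs: H256, rhs: H256) -> H256 {
    let mut hasher = Keccak256::new();
    hasher.update(lhs);
    hasher.update(rhs);
    hasher.finalize().into()
}

fn main() {
    // Rolling hash of a block's transactions, as in `L2BlockHasher::push_tx_hash`:
    // start from zero and fold in each tx hash in execution order.
    let tx_hashes = [[0x11u8; 32], [0x22u8; 32]];
    let mut rolling = [0u8; 32];
    for tx_hash in tx_hashes {
        rolling = keccak256_concat(rolling, tx_hash);
    }
    let hex: String = rolling.iter().map(|b| format!("{b:02x}")).collect();
    println!("txs_rolling_hash = 0x{hex}");
}
```

The `transaction_request.rs` hunk further below applies the same rename when combining an EIP-712 signed message hash with the hash of the signature.
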
diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 40532a1e5899..1eba7e7a9ec0 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -17,7 +17,6 @@ use zksync_system_constants::{ KNOWN_CODES_STORAGE_ADDRESS, L2_TO_L1_LOGS_TREE_ROOT_KEY, STATE_DIFF_HASH_KEY_PRE_GATEWAY, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::u256_to_h256; use crate::{ blob::num_blobs_required, @@ -26,6 +25,7 @@ use crate::{ l2_to_l1_logs_tree_size, parse_system_logs_for_blob_hashes_pre_gateway, L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log, }, + u256_to_h256, web3::keccak256, writes::{ compress_state_diffs, InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord, diff --git a/core/lib/types/src/fee.rs b/core/lib/types/src/fee.rs index 9dc2cda9e62b..f302c51cd4a9 100644 --- a/core/lib/types/src/fee.rs +++ b/core/lib/types/src/fee.rs @@ -1,5 +1,4 @@ use serde::{Deserialize, Serialize}; -use zksync_utils::ceil_div; use crate::U256; @@ -43,10 +42,10 @@ pub fn encoding_len( // All of the fields are encoded as `bytes`, so their encoding takes ceil(len, 32) slots. // For factory deps we only provide hashes, which are encoded as an array of bytes32. - let dynamic_len = ceil_div(data_len, 32) - + ceil_div(signature_len, 32) - + ceil_div(paymaster_input_len, 32) - + ceil_div(reserved_dynamic_len, 32) + let dynamic_len = data_len.div_ceil(32) + + signature_len.div_ceil(32) + + paymaster_input_len.div_ceil(32) + + reserved_dynamic_len.div_ceil(32) + factory_deps_len; BASE_LEN + dynamic_len as usize diff --git a/core/lib/types/src/fee_model.rs b/core/lib/types/src/fee_model.rs index ae346656ea6f..79515e6f63a9 100644 --- a/core/lib/types/src/fee_model.rs +++ b/core/lib/types/src/fee_model.rs @@ -1,13 +1,10 @@ -// FIXME: separate crate together with node_fee_model interfaces? - use std::num::NonZeroU64; use bigdecimal::{BigDecimal, ToPrimitive}; use serde::{Deserialize, Serialize}; use zksync_system_constants::L1_GAS_PER_PUBDATA_BYTE; -use zksync_utils::ceil_div_u256; -use crate::{ProtocolVersionId, U256}; +use crate::{ceil_div_u256, ProtocolVersionId, U256}; /// Fee input to be provided into the VM. It contains two options: /// - `L1Pegged`: L1 gas price is provided to the VM, and the pubdata price is derived from it. 
Using this option is required for the diff --git a/core/lib/types/src/l1/mod.rs b/core/lib/types/src/l1/mod.rs index e8144c75db2e..0cc0f3b0e489 100644 --- a/core/lib/types/src/l1/mod.rs +++ b/core/lib/types/src/l1/mod.rs @@ -4,19 +4,18 @@ use std::convert::TryFrom; use serde::{Deserialize, Serialize}; use zksync_basic_types::{web3::Log, Address, L1BlockNumber, PriorityOpId, H256, U256}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, h256_to_u256, u256_to_account_address, -}; +use zksync_utils::bytecode::hash_bytecode; use super::Transaction; use crate::{ - abi, ethabi, + abi, address_to_u256, ethabi, h256_to_u256, helpers::unix_timestamp_ms, l1::error::L1TxParseError, l2::TransactionType, priority_op_onchain_data::{PriorityOpOnchainData, PriorityOpOnchainMetadata}, tx::Execute, - ExecuteTransactionCommon, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, + u256_to_address, ExecuteTransactionCommon, PRIORITY_OPERATION_L2_TX_TYPE, + PROTOCOL_UPGRADE_TX_TYPE, }; pub mod error; @@ -332,10 +331,10 @@ impl TryFrom for L1Tx { let common_data = L1TxCommonData { serial_id: PriorityOpId(req.transaction.nonce.try_into().unwrap()), canonical_tx_hash: H256::from_slice(&req.tx_hash), - sender: u256_to_account_address(&req.transaction.from), + sender: u256_to_address(&req.transaction.from), layer_2_tip_fee: U256::zero(), to_mint: req.transaction.reserved[0], - refund_recipient: u256_to_account_address(&req.transaction.reserved[1]), + refund_recipient: u256_to_address(&req.transaction.reserved[1]), full_fee: U256::zero(), gas_limit: req.transaction.gas_limit, max_fee_per_gas: req.transaction.max_fee_per_gas, @@ -347,7 +346,7 @@ impl TryFrom for L1Tx { }; let execute = Execute { - contract_address: Some(u256_to_account_address(&req.transaction.to)), + contract_address: Some(u256_to_address(&req.transaction.to)), calldata: req.transaction.data, factory_deps: req.factory_deps, value: req.transaction.value, diff --git a/core/lib/types/src/l2_to_l1_log.rs b/core/lib/types/src/l2_to_l1_log.rs index 957cfa9a1a6a..1b84a79024c7 100644 --- a/core/lib/types/src/l2_to_l1_log.rs +++ b/core/lib/types/src/l2_to_l1_log.rs @@ -117,11 +117,10 @@ pub fn parse_system_logs_for_blob_hashes_pre_gateway( #[cfg(test)] mod tests { - use zksync_basic_types::U256; use zksync_system_constants::L1_MESSENGER_ADDRESS; - use zksync_utils::u256_to_h256; - use super::L2ToL1Log; + use super::*; + use crate::{u256_to_h256, U256}; #[test] fn l2_to_l1_log_to_bytes() { diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 320264f28f0a..48ed7445ef5e 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -17,9 +17,7 @@ pub use storage::*; pub use tx::Execute; pub use zksync_basic_types::{protocol_version::ProtocolVersionId, vm, *}; pub use zksync_crypto_primitives::*; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, h256_to_u256, u256_to_account_address, -}; +use zksync_utils::bytecode::hash_bytecode; use crate::{ l2::{L2Tx, TransactionType}, @@ -368,10 +366,10 @@ impl Transaction { .map_err(|err| anyhow::format_err!("{err}"))?, ), canonical_tx_hash: hash, - sender: u256_to_account_address(&tx.from), + sender: u256_to_address(&tx.from), layer_2_tip_fee: U256::zero(), to_mint: tx.reserved[0], - refund_recipient: u256_to_account_address(&tx.reserved[1]), + refund_recipient: u256_to_address(&tx.reserved[1]), full_fee: U256::zero(), gas_limit: tx.gas_limit, max_fee_per_gas: tx.max_fee_per_gas, @@ -385,9 +383,9 @@ impl Transaction { 
ExecuteTransactionCommon::ProtocolUpgrade(ProtocolUpgradeTxCommonData { upgrade_id: tx.nonce.try_into().unwrap(), canonical_tx_hash: hash, - sender: u256_to_account_address(&tx.from), + sender: u256_to_address(&tx.from), to_mint: tx.reserved[0], - refund_recipient: u256_to_account_address(&tx.reserved[1]), + refund_recipient: u256_to_address(&tx.reserved[1]), gas_limit: tx.gas_limit, max_fee_per_gas: tx.max_fee_per_gas, gas_per_pubdata_limit: tx.gas_per_pubdata_byte_limit, @@ -397,7 +395,7 @@ impl Transaction { unknown_type => anyhow::bail!("unknown tx type {unknown_type}"), }, execute: Execute { - contract_address: Some(u256_to_account_address(&tx.to)), + contract_address: Some(u256_to_address(&tx.to)), calldata: tx.data, factory_deps, value: tx.value, diff --git a/core/lib/types/src/protocol_upgrade.rs b/core/lib/types/src/protocol_upgrade.rs index 48f26dfd5c7f..7d8f678fa851 100644 --- a/core/lib/types/src/protocol_upgrade.rs +++ b/core/lib/types/src/protocol_upgrade.rs @@ -12,11 +12,10 @@ use zksync_contracts::{ BaseSystemContractsHashes, ADMIN_EXECUTE_UPGRADE_FUNCTION, ADMIN_UPGRADE_CHAIN_FROM_VERSION_FUNCTION, DIAMOND_CUT, }; -use zksync_utils::h256_to_u256; use crate::{ - abi, ethabi::ParamType, web3::Log, Address, Execute, ExecuteTransactionCommon, Transaction, - TransactionType, H256, U256, + abi, ethabi::ParamType, h256_to_u256, web3::Log, Address, Execute, ExecuteTransactionCommon, + Transaction, TransactionType, H256, U256, }; /// Represents a call to be made during governance operation. diff --git a/core/lib/types/src/snapshots.rs b/core/lib/types/src/snapshots.rs index 156d1e4723dd..b9ee62ab24ec 100644 --- a/core/lib/types/src/snapshots.rs +++ b/core/lib/types/src/snapshots.rs @@ -5,9 +5,8 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use serde::{Deserialize, Serialize}; use zksync_basic_types::{AccountTreeId, L1BatchNumber, L2BlockNumber, H256}; use zksync_protobuf::{required, ProtoFmt}; -use zksync_utils::u256_to_h256; -use crate::{utils, web3::Bytes, ProtocolVersionId, StorageKey, StorageValue, U256}; +use crate::{u256_to_h256, utils, web3::Bytes, ProtocolVersionId, StorageKey, StorageValue, U256}; /// Information about all snapshots persisted by the node. 
#[derive(Debug, Clone, Serialize, Deserialize)] @@ -331,9 +330,8 @@ pub fn uniform_hashed_keys_chunk(chunk_id: u64, chunk_count: u64) -> ops::RangeI #[cfg(test)] mod tests { - use zksync_utils::h256_to_u256; - use super::*; + use crate::h256_to_u256; #[test] fn chunking_is_correct() { diff --git a/core/lib/types/src/storage/log.rs b/core/lib/types/src/storage/log.rs index a05e25abccb5..075a05781b67 100644 --- a/core/lib/types/src/storage/log.rs +++ b/core/lib/types/src/storage/log.rs @@ -2,10 +2,10 @@ use std::mem; use serde::{Deserialize, Serialize}; use zksync_basic_types::AccountTreeId; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::{ api::ApiStorageLog, + h256_to_u256, u256_to_h256, zk_evm_types::{self, LogQuery, Timestamp}, StorageKey, StorageValue, U256, }; diff --git a/core/lib/types/src/storage/mod.rs b/core/lib/types/src/storage/mod.rs index 9ef037dc29b2..84a29ed8c039 100644 --- a/core/lib/types/src/storage/mod.rs +++ b/core/lib/types/src/storage/mod.rs @@ -5,9 +5,8 @@ pub use log::*; use serde::{Deserialize, Serialize}; use zksync_basic_types::{web3::keccak256, L2ChainId}; pub use zksync_system_constants::*; -use zksync_utils::{address_to_h256, u256_to_h256}; -use crate::{AccountTreeId, Address, H160, H256, U256}; +use crate::{address_to_h256, u256_to_h256, AccountTreeId, Address, H160, H256, U256}; pub mod log; pub mod witness_block_state; diff --git a/core/lib/types/src/transaction_request.rs b/core/lib/types/src/transaction_request.rs index a8713f301ba6..931615bad0fe 100644 --- a/core/lib/types/src/transaction_request.rs +++ b/core/lib/types/src/transaction_request.rs @@ -5,17 +5,15 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use zksync_basic_types::H256; use zksync_system_constants::{DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, MAX_ENCODED_TX_SIZE}; -use zksync_utils::{ - bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}, - concat_and_hash, u256_to_h256, -}; +use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; use super::{EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE}; use crate::{ fee::Fee, l1::L1Tx, l2::{L2Tx, TransactionType}, - web3::{keccak256, AccessList, Bytes}, + u256_to_h256, + web3::{keccak256, keccak256_concat, AccessList, Bytes}, Address, EIP712TypedStructure, Eip712Domain, L1TxCommonData, L2ChainId, Nonce, PackedEthSignature, StructBuilder, LEGACY_TX_TYPE, U256, U64, }; @@ -732,7 +730,7 @@ impl TransactionRequest { signed_message: H256, ) -> Result, SerializationTransactionError> { if self.is_eip712_tx() { - return Ok(Some(concat_and_hash( + return Ok(Some(keccak256_concat( signed_message, H256(keccak256(&self.get_signature()?)), ))); diff --git a/core/lib/types/src/tx/execute.rs b/core/lib/types/src/tx/execute.rs index 0edece9e46b4..0c3f63467cc4 100644 --- a/core/lib/types/src/tx/execute.rs +++ b/core/lib/types/src/tx/execute.rs @@ -1,9 +1,12 @@ use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; use zksync_system_constants::CONTRACT_DEPLOYER_ADDRESS; -use zksync_utils::{bytecode::hash_bytecode, ZeroPrefixHexSerde}; +use zksync_utils::bytecode::hash_bytecode; -use crate::{ethabi, Address, EIP712TypedStructure, StructBuilder, H256, U256}; +use crate::{ + ethabi, serde_wrappers::ZeroPrefixHexSerde, Address, EIP712TypedStructure, StructBuilder, H256, + U256, +}; /// This struct is the `serde` schema for the `Execute` struct. 
/// It allows us to modify `Execute` struct without worrying diff --git a/core/lib/types/src/utils.rs b/core/lib/types/src/utils.rs index bf086d6cdcd4..56a8ccf9fe9f 100644 --- a/core/lib/types/src/utils.rs +++ b/core/lib/types/src/utils.rs @@ -2,11 +2,10 @@ use std::fmt; use chrono::{DateTime, TimeZone, Utc}; use zksync_basic_types::{Address, H256}; -use zksync_utils::{address_to_h256, u256_to_h256}; use crate::{ - system_contracts::DEPLOYMENT_NONCE_INCREMENT, web3::keccak256, AccountTreeId, StorageKey, - L2_BASE_TOKEN_ADDRESS, U256, + address_to_h256, system_contracts::DEPLOYMENT_NONCE_INCREMENT, u256_to_h256, web3::keccak256, + AccountTreeId, StorageKey, L2_BASE_TOKEN_ADDRESS, U256, }; /// Displays a Unix timestamp (seconds since epoch) in human-readable form. Useful for logging. diff --git a/core/lib/utils/Cargo.toml b/core/lib/utils/Cargo.toml index 9b65ccdd29cb..9ba286a7e493 100644 --- a/core/lib/utils/Cargo.toml +++ b/core/lib/utils/Cargo.toml @@ -15,22 +15,16 @@ zksync_basic_types.workspace = true zk_evm.workspace = true zksync_vlog.workspace = true -bigdecimal.workspace = true const-decoder.workspace = true -num = { workspace = true, features = ["serde"] } -serde = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["time"] } tracing.workspace = true anyhow.workspace = true thiserror.workspace = true futures.workspace = true -hex.workspace = true reqwest = { workspace = true, features = ["blocking"] } serde_json.workspace = true once_cell.workspace = true [dev-dependencies] -rand.workspace = true tokio = { workspace = true, features = ["macros", "rt"] } -bincode.workspace = true assert_matches.workspace = true diff --git a/core/lib/utils/src/bytecode.rs b/core/lib/utils/src/bytecode.rs index 4fda5e9d48a0..fcba022f9277 100644 --- a/core/lib/utils/src/bytecode.rs +++ b/core/lib/utils/src/bytecode.rs @@ -1,11 +1,9 @@ -// FIXME: move to basic_types? +// FIXME (PLA-1064): move to basic_types use anyhow::Context as _; use zk_evm::k256::sha2::{Digest, Sha256}; use zksync_basic_types::{H256, U256}; -use crate::bytes_to_chunks; - const MAX_BYTECODE_LENGTH_IN_WORDS: usize = (1 << 16) - 1; const MAX_BYTECODE_LENGTH_BYTES: usize = MAX_BYTECODE_LENGTH_IN_WORDS * 32; @@ -42,6 +40,22 @@ pub fn validate_bytecode(code: &[u8]) -> Result<(), InvalidBytecodeError> { Ok(()) } +fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> { + assert_eq!( + bytes.len() % 32, + 0, + "Bytes must be divisible by 32 to split into chunks" + ); + bytes + .chunks(32) + .map(|el| { + let mut chunk = [0u8; 32]; + chunk.copy_from_slice(el); + chunk + }) + .collect() +} + /// Hashes the provided EraVM bytecode. pub fn hash_bytecode(code: &[u8]) -> H256 { let chunked_code = bytes_to_chunks(code); diff --git a/core/lib/utils/src/convert.rs b/core/lib/utils/src/convert.rs deleted file mode 100644 index e086e385c8ef..000000000000 --- a/core/lib/utils/src/convert.rs +++ /dev/null @@ -1,185 +0,0 @@ -use std::convert::TryInto; - -use bigdecimal::BigDecimal; -use num::BigUint; -use zksync_basic_types::{Address, H256, U256}; - -pub fn u256_to_big_decimal(value: U256) -> BigDecimal { - let mut u32_digits = vec![0_u32; 8]; - // `u64_digit`s from `U256` are little-endian - for (i, &u64_digit) in value.0.iter().enumerate() { - u32_digits[2 * i] = u64_digit as u32; - u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; - } - let value = BigUint::new(u32_digits); - BigDecimal::new(value.into(), 0) -} - -/// Converts `BigUint` value into the corresponding `U256` value. 
-fn biguint_to_u256(value: BigUint) -> U256 {
-    let bytes = value.to_bytes_le();
-    U256::from_little_endian(&bytes)
-}
-
-/// Converts `BigDecimal` value into the corresponding `U256` value.
-pub fn bigdecimal_to_u256(value: BigDecimal) -> U256 {
-    let bigint = value.with_scale(0).into_bigint_and_exponent().0;
-    biguint_to_u256(bigint.to_biguint().unwrap())
-}
-
-fn ensure_chunkable(bytes: &[u8]) {
-    assert!(
-        bytes.len() % 32 == 0,
-        "Bytes must be divisible by 32 to split into chunks"
-    );
-}
-
-pub fn h256_to_u256(num: H256) -> U256 {
-    U256::from_big_endian(num.as_bytes())
-}
-
-pub fn address_to_h256(address: &Address) -> H256 {
-    let mut buffer = [0u8; 32];
-    buffer[12..].copy_from_slice(address.as_bytes());
-    H256(buffer)
-}
-
-pub fn address_to_u256(address: &Address) -> U256 {
-    h256_to_u256(address_to_h256(address))
-}
-
-pub fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> {
-    ensure_chunkable(bytes);
-    bytes
-        .chunks(32)
-        .map(|el| {
-            let mut chunk = [0u8; 32];
-            chunk.copy_from_slice(el);
-            chunk
-        })
-        .collect()
-}
-
-pub fn be_chunks_to_h256_words(chunks: Vec<[u8; 32]>) -> Vec<H256> {
-    chunks.into_iter().map(|el| H256::from_slice(&el)).collect()
-}
-
-pub fn bytes_to_be_words(vec: Vec<u8>) -> Vec<U256> {
-    ensure_chunkable(&vec);
-    vec.chunks(32).map(U256::from_big_endian).collect()
-}
-
-pub fn be_words_to_bytes(words: &[U256]) -> Vec<u8> {
-    words
-        .iter()
-        .flat_map(|w| {
-            let mut bytes = [0u8; 32];
-            w.to_big_endian(&mut bytes);
-            bytes
-        })
-        .collect()
-}
-
-pub fn u256_to_h256(num: U256) -> H256 {
-    let mut bytes = [0u8; 32];
-    num.to_big_endian(&mut bytes);
-    H256::from_slice(&bytes)
-}
-
-/// Converts `U256` value into the Address
-pub fn u256_to_account_address(value: &U256) -> Address {
-    let mut bytes = [0u8; 32];
-    value.to_big_endian(&mut bytes);
-
-    Address::from_slice(&bytes[12..])
-}
-
-/// Converts `H256` value into the Address
-pub fn h256_to_account_address(value: &H256) -> Address {
-    Address::from_slice(&value.as_bytes()[12..])
-}
-
-pub fn be_bytes_to_safe_address(bytes: &[u8]) -> Option<Address>
{ - if bytes.len() < 20 { - return None; - } - - let (zero_bytes, address_bytes) = bytes.split_at(bytes.len() - 20); - - if zero_bytes.iter().any(|b| *b != 0) { - None - } else { - Some(Address::from_slice(address_bytes)) - } -} - -/// Converts `h256` value as BE into the u32 -pub fn h256_to_u32(value: H256) -> u32 { - let be_u32_bytes: [u8; 4] = value[28..].try_into().unwrap(); - u32::from_be_bytes(be_u32_bytes) -} - -/// Converts u32 into the H256 as BE bytes -pub fn u32_to_h256(value: u32) -> H256 { - let mut result = [0u8; 32]; - result[28..].copy_from_slice(&value.to_be_bytes()); - H256(result) -} - -/// Converts `U256` value into bytes array -pub fn u256_to_bytes_be(value: &U256) -> Vec { - let mut bytes = vec![0u8; 32]; - value.to_big_endian(bytes.as_mut_slice()); - bytes -} - -#[cfg(test)] -mod test { - use num::BigInt; - use rand::{rngs::StdRng, Rng, SeedableRng}; - - use super::*; - - #[test] - fn test_u256_to_bigdecimal() { - const RNG_SEED: u64 = 123; - - let mut rng = StdRng::seed_from_u64(RNG_SEED); - // Small values. - for _ in 0..10_000 { - let value: u64 = rng.gen(); - let expected = BigDecimal::from(value); - assert_eq!(u256_to_big_decimal(value.into()), expected); - } - - // Arbitrary values - for _ in 0..10_000 { - let u64_digits: [u64; 4] = rng.gen(); - let value = u64_digits - .iter() - .enumerate() - .map(|(i, &digit)| U256::from(digit) << (i * 64)) - .fold(U256::zero(), |acc, x| acc + x); - let expected_value = u64_digits - .iter() - .enumerate() - .map(|(i, &digit)| BigInt::from(digit) << (i * 64)) - .fold(BigInt::from(0), |acc, x| acc + x); - assert_eq!( - u256_to_big_decimal(value), - BigDecimal::new(expected_value, 0) - ); - } - } - - #[test] - fn test_bigdecimal_to_u256() { - let value = BigDecimal::from(100u32); - let expected = U256::from(100u32); - assert_eq!(bigdecimal_to_u256(value), expected); - - let value = BigDecimal::new(BigInt::from(100), -2); - let expected = U256::from(10000u32); - assert_eq!(bigdecimal_to_u256(value), expected); - } -} diff --git a/core/lib/utils/src/format.rs b/core/lib/utils/src/format.rs deleted file mode 100644 index 9d15d4c358e7..000000000000 --- a/core/lib/utils/src/format.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Built-in deps -use std::collections::VecDeque; -use std::string::ToString; -// External deps -// Workspace deps - -/// Formats amount in wei to tokens with precision. -/// Behaves just like ethers.utils.formatUnits -pub fn format_units(wei: impl ToString, units: u8) -> String { - let mut chars: VecDeque = wei.to_string().chars().collect(); - - while chars.len() < units as usize { - chars.push_front('0'); - } - chars.insert(chars.len() - units as usize, '.'); - if *chars.front().unwrap() == '.' { - chars.push_front('0'); - } - while *chars.back().unwrap() == '0' { - chars.pop_back(); - } - if *chars.back().unwrap() == '.' { - chars.push_back('0'); - } - chars.iter().collect() -} - -/// Formats amount in wei to tokens. 
-/// Behaves just like js ethers.utils.formatEther -pub fn format_ether(wei: impl ToString) -> String { - format_units(wei, 18) -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_format_units() { - // Test vector of (decimals, wei input, expected output) - let vals = vec![ - (0, "1000000000000000100000", "1000000000000000100000.0"), - (1, "0", "0.0"), - (1, "11000000000000000000", "1100000000000000000.0"), - (2, "0", "0.0"), - (2, "1000000000000000100000", "10000000000000001000.0"), - (4, "10001000000", "1000100.0"), - (4, "10100000000000000000000", "1010000000000000000.0"), - (4, "110", "0.011"), - (6, "1000000000000000100000", "1000000000000000.1"), - (8, "0", "0.0"), - (8, "10100000000000000000000", "101000000000000.0"), - (8, "110", "0.0000011"), - (9, "10000000000000000001", "10000000000.000000001"), - (9, "11000000", "0.011"), - (9, "11000000000000000000", "11000000000.0"), - (10, "10001000000", "1.0001"), - (10, "20000000000000000000000", "2000000000000.0"), - (11, "0", "0.0"), - (11, "10100000000000000000000", "101000000000.0"), - (12, "1000000000000000100000", "1000000000.0000001"), - (12, "10001000000", "0.010001"), - (12, "10010000000", "0.01001"), - (12, "110", "0.00000000011"), - (13, "10010000000", "0.001001"), - (14, "10010000000", "0.0001001"), - (14, "110", "0.0000000000011"), - (15, "0", "0.0"), - (17, "1000000000000000100000", "10000.000000000001"), - (17, "10001000000", "0.00000010001"), - (18, "1000000000000000100000", "1000.0000000000001"), - ]; - - for (dec, input, output) in vals { - assert_eq!(format_units(&input, dec), output); - } - } -} diff --git a/core/lib/utils/src/lib.rs b/core/lib/utils/src/lib.rs index 92a1d7a0c470..e2ab70695113 100644 --- a/core/lib/utils/src/lib.rs +++ b/core/lib/utils/src/lib.rs @@ -1,13 +1,7 @@ //! Various helpers used in the ZKsync stack. 
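Note: the conversion helpers removed from this crate are re-homed rather than dropped. The `zksync_types` hunks earlier in this patch import `h256_to_u256`, `u256_to_h256`, `address_to_h256` and friends from the crate root, and the address conversions reappear under the shorter names `u256_to_address` / `h256_to_address`. A minimal sketch of the call-site migration, assuming the helpers are re-exported from the `zksync_types` crate root as the updated imports suggest:

    // Before this patch:
    // use zksync_utils::{h256_to_u256, u256_to_account_address};
    // After it (names taken from the updated call sites; re-export path assumed):
    use zksync_types::{h256_to_u256, u256_to_address, Address, H256, U256};

    fn recipient_from_word(word: U256) -> Address {
        u256_to_address(&word)
    }

    fn key_as_u256(key: H256) -> U256 {
        h256_to_u256(key)
    }
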
pub mod bytecode; -mod convert; pub mod env; pub mod http_with_retries; -pub mod misc; pub mod panic_extractor; -mod serde_wrappers; -pub mod time; pub mod wait_for_tasks; - -pub use self::{convert::*, misc::*, serde_wrappers::*}; diff --git a/core/lib/utils/src/misc.rs b/core/lib/utils/src/misc.rs deleted file mode 100644 index 52bd7657c4e1..000000000000 --- a/core/lib/utils/src/misc.rs +++ /dev/null @@ -1,55 +0,0 @@ -use zksync_basic_types::{web3::keccak256, H256, U256}; - -pub const fn ceil_div(a: u64, b: u64) -> u64 { - if a == 0 { - a - } else { - (a - 1) / b + 1 - } -} - -pub fn ceil_div_u256(a: U256, b: U256) -> U256 { - (a + b - U256::from(1)) / b -} - -pub fn concat_and_hash(hash1: H256, hash2: H256) -> H256 { - let mut bytes = [0_u8; 64]; - bytes[..32].copy_from_slice(&hash1.0); - bytes[32..].copy_from_slice(&hash2.0); - H256(keccak256(&bytes)) -} - -pub fn expand_memory_contents(packed: &[(usize, U256)], memory_size_bytes: usize) -> Vec { - let mut result: Vec = vec![0; memory_size_bytes]; - - for (offset, value) in packed { - value.to_big_endian(&mut result[(offset * 32)..(offset + 1) * 32]); - } - - result -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ceil_div_u64_max() { - assert_eq!(0, ceil_div(u64::MIN, u64::MAX)); - assert_eq!(1, ceil_div(u64::MAX, u64::MAX)); - } - - #[test] - fn test_ceil_div_roundup_required() { - assert_eq!(3, ceil_div(5, 2)); - assert_eq!(4, ceil_div(10, 3)); - assert_eq!(3, ceil_div(15, 7)); - } - - #[test] - fn test_ceil_div_no_roundup_required() { - assert_eq!(2, ceil_div(4, 2)); - assert_eq!(2, ceil_div(6, 3)); - assert_eq!(2, ceil_div(14, 7)); - } -} diff --git a/core/lib/utils/src/time.rs b/core/lib/utils/src/time.rs deleted file mode 100644 index 70372db34f49..000000000000 --- a/core/lib/utils/src/time.rs +++ /dev/null @@ -1,19 +0,0 @@ -use std::time::{Duration, SystemTime, UNIX_EPOCH}; - -pub fn seconds_since_epoch() -> u64 { - duration_since_epoch().as_secs() -} - -pub fn millis_since(since: u64) -> u64 { - (millis_since_epoch() - since as u128 * 1000) as u64 -} - -pub fn millis_since_epoch() -> u128 { - duration_since_epoch().as_millis() -} - -fn duration_since_epoch() -> Duration { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("Incorrect system time") -} diff --git a/core/lib/vm_executor/src/oneshot/block.rs b/core/lib/vm_executor/src/oneshot/block.rs index d6118f15b98e..66bdd30e40ea 100644 --- a/core/lib/vm_executor/src/oneshot/block.rs +++ b/core/lib/vm_executor/src/oneshot/block.rs @@ -1,3 +1,5 @@ +use std::time::SystemTime; + use anyhow::Context; use zksync_dal::{Connection, Core, CoreDal, DalError}; use zksync_multivm::{ @@ -8,11 +10,10 @@ use zksync_types::{ api, block::{unpack_block_info, L2BlockHasher}, fee_model::BatchFeeInput, - AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, + h256_to_u256, AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::{h256_to_u256, time::seconds_since_epoch}; use super::{env::OneshotEnvParameters, ContractsKind}; @@ -124,7 +125,11 @@ impl BlockInfo { state_l2_block_number = sealed_l2_block_header.number; // Timestamp of the next L1 batch must be greater than the timestamp of the last L2 block. 
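Note: with `zksync_utils::time` deleted above, the consumers below compute timestamps inline on top of `std::time::SystemTime`. A minimal sketch of the pattern these hunks adopt (the exact error handling differs per call site: `.context(..)?` in the oneshot executor, `.expect(..)` in metrics and test code):

    use std::time::{SystemTime, UNIX_EPOCH};

    fn seconds_since_epoch() -> u64 {
        SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("incorrect system time")
            .as_secs()
    }
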
- l1_batch_timestamp = seconds_since_epoch().max(sealed_l2_block_header.timestamp + 1); + let current_timestamp = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .context("incorrect system time")? + .as_secs(); + l1_batch_timestamp = current_timestamp.max(sealed_l2_block_header.timestamp + 1); sealed_l2_block_header }; diff --git a/core/lib/vm_executor/src/oneshot/mod.rs b/core/lib/vm_executor/src/oneshot/mod.rs index 7d45dcca8cd3..e95164c0fc87 100644 --- a/core/lib/vm_executor/src/oneshot/mod.rs +++ b/core/lib/vm_executor/src/oneshot/mod.rs @@ -33,14 +33,14 @@ use zksync_multivm::{ }; use zksync_types::{ block::pack_block_info, - get_nonce_key, + get_nonce_key, h256_to_u256, l2::L2Tx, + u256_to_h256, utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, vm::FastVmMode, AccountTreeId, Nonce, StorageKey, Transaction, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; pub use self::{ block::{BlockInfo, ResolvedBlockInfo}, diff --git a/core/node/api_server/src/execution_sandbox/storage.rs b/core/node/api_server/src/execution_sandbox/storage.rs index c80356f6e36e..026ac58733a4 100644 --- a/core/node/api_server/src/execution_sandbox/storage.rs +++ b/core/node/api_server/src/execution_sandbox/storage.rs @@ -3,11 +3,10 @@ use zksync_multivm::interface::storage::{ReadStorage, StorageWithOverrides}; use zksync_types::{ api::state_override::{OverrideState, StateOverride}, - get_code_key, get_known_code_key, get_nonce_key, + get_code_key, get_known_code_key, get_nonce_key, h256_to_u256, u256_to_h256, utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, AccountTreeId, StorageKey, H256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; /// This method is blocking. 
pub(super) fn apply_state_override( diff --git a/core/node/api_server/src/testonly.rs b/core/node/api_server/src/testonly.rs index de6501716125..90001c908a9f 100644 --- a/core/node/api_server/src/testonly.rs +++ b/core/node/api_server/src/testonly.rs @@ -11,6 +11,7 @@ use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::utils::derive_base_fee_and_gas_per_pubdata; use zksync_system_constants::{L2_BASE_TOKEN_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE}; use zksync_types::{ + address_to_u256, api::state_override::{Bytecode, OverrideAccount, OverrideState, StateOverride}, ethabi, ethabi::Token, @@ -20,11 +21,11 @@ use zksync_types::{ l1::L1Tx, l2::L2Tx, transaction_request::{CallRequest, Eip712Meta, PaymasterParams}, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, K256PrivateKey, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, StorageKey, StorageLog, EIP_712_TX_TYPE, H256, U256, }; -use zksync_utils::{address_to_u256, u256_to_h256}; const EXPENSIVE_CONTRACT_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/expensive/expensive.sol/Expensive.json"; diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 011d9e4e2b2f..180b53492839 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -24,7 +24,7 @@ use zksync_state_keeper::{ use zksync_types::{ api::state_override::StateOverride, fee_model::BatchFeeInput, - get_intrinsic_constants, + get_intrinsic_constants, h256_to_u256, l2::{error::TxCheckError::TxDuplication, L2Tx}, transaction_request::CallOverrides, utils::storage_key_for_eth_balance, @@ -32,7 +32,6 @@ use zksync_types::{ AccountTreeId, Address, L2ChainId, Nonce, ProtocolVersionId, Transaction, H160, H256, MAX_NEW_FACTORY_DEPS, U256, }; -use zksync_utils::h256_to_u256; use zksync_vm_executor::oneshot::{ CallOrExecute, EstimateGas, MultiVMBaseSystemContracts, OneshotEnvParameters, }; diff --git a/core/node/api_server/src/web3/namespaces/eth.rs b/core/node/api_server/src/web3/namespaces/eth.rs index e594af20d183..588316ce70e4 100644 --- a/core/node/api_server/src/web3/namespaces/eth.rs +++ b/core/node/api_server/src/web3/namespaces/eth.rs @@ -8,14 +8,12 @@ use zksync_types::{ }, l2::{L2Tx, TransactionType}, transaction_request::CallRequest, + u256_to_h256, utils::decompose_full_nonce, web3::{self, Bytes, SyncInfo, SyncState}, AccountTreeId, L2BlockNumber, StorageKey, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{ - bytecode::{prepare_evm_bytecode, BytecodeMarker}, - u256_to_h256, -}; +use zksync_utils::bytecode::{prepare_evm_bytecode, BytecodeMarker}; use zksync_web3_decl::{ error::Web3Error, types::{Address, Block, Filter, FilterChanges, Log, U64}, diff --git a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index 1a4114bd2c6a..05c90f0b0140 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -7,12 +7,14 @@ use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_multivm::interface::VmExecutionResultAndLogs; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ + address_to_h256, api::{ state_override::StateOverride, BlockDetails, BridgeAddresses, GetLogsFilter, L1BatchDetails, L2ToL1LogProof, Proof, ProtocolVersion, StorageProof, TransactionDetails, }, fee::Fee, fee_model::{FeeParams, PubdataIndependentBatchFeeModelInput}, + h256_to_u256, l1::L1Tx, l2::L2Tx, 
l2_to_l1_log::{l2_to_l1_logs_tree_size, L2ToL1Log}, @@ -23,7 +25,6 @@ use zksync_types::{ AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, U256, U64, }; -use zksync_utils::{address_to_h256, h256_to_u256}; use zksync_web3_decl::{ error::Web3Error, types::{Address, Token, H256}, diff --git a/core/node/api_server/src/web3/tests/mod.rs b/core/node/api_server/src/web3/tests/mod.rs index b35bb9f5fad7..9080c5ba413c 100644 --- a/core/node/api_server/src/web3/tests/mod.rs +++ b/core/node/api_server/src/web3/tests/mod.rs @@ -40,16 +40,14 @@ use zksync_types::{ system_contracts::get_system_smart_contracts, tokens::{TokenInfo, TokenMetadata}, tx::IncludedTxLocation, + u256_to_h256, utils::{storage_key_for_eth_balance, storage_key_for_standard_token_balance}, AccountTreeId, Address, L1BatchNumber, Nonce, ProtocolVersionId, StorageKey, StorageLog, H256, U256, U64, }; -use zksync_utils::{ - bytecode::{ - hash_bytecode, hash_evm_bytecode, - testonly::{PROCESSED_EVM_BYTECODE, RAW_EVM_BYTECODE}, - }, - u256_to_h256, +use zksync_utils::bytecode::{ + hash_bytecode, hash_evm_bytecode, + testonly::{PROCESSED_EVM_BYTECODE, RAW_EVM_BYTECODE}, }; use zksync_vm_executor::oneshot::MockOneshotExecutor; use zksync_web3_decl::{ diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 4e0426de7bfa..a82ca3b9e347 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -16,10 +16,9 @@ use zksync_multivm::interface::{ }; use zksync_types::{ api::ApiStorageLog, fee_model::BatchFeeInput, get_intrinsic_constants, - transaction_request::CallRequest, vm::FastVmMode, K256PrivateKey, L2ChainId, + transaction_request::CallRequest, u256_to_h256, vm::FastVmMode, K256PrivateKey, L2ChainId, PackedEthSignature, StorageLogKind, StorageLogWithPreviousValue, Transaction, U256, }; -use zksync_utils::u256_to_h256; use zksync_vm_executor::oneshot::{ BaseSystemContractsProvider, ContractsKind, MockOneshotExecutor, OneshotEnvParameters, ResolvedBlockInfo, diff --git a/core/node/base_token_adjuster/Cargo.toml b/core/node/base_token_adjuster/Cargo.toml index 9dcf5d796530..b326e7a6b42d 100644 --- a/core/node/base_token_adjuster/Cargo.toml +++ b/core/node/base_token_adjuster/Cargo.toml @@ -19,7 +19,6 @@ zksync_external_price_api.workspace = true zksync_contracts.workspace = true zksync_eth_client.workspace = true zksync_node_fee_model.workspace = true -zksync_utils.workspace = true vise.workspace = true bigdecimal.workspace = true diff --git a/core/node/commitment_generator/Cargo.toml b/core/node/commitment_generator/Cargo.toml index 1f4645414cbd..f0b4046bab42 100644 --- a/core/node/commitment_generator/Cargo.toml +++ b/core/node/commitment_generator/Cargo.toml @@ -16,7 +16,6 @@ zksync_types.workspace = true zksync_dal.workspace = true zksync_health_check.workspace = true zksync_l1_contract_interface.workspace = true -zksync_utils.workspace = true zksync_eth_client.workspace = true zksync_contracts.workspace = true zksync_multivm.workspace = true diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index 9a33d4766f6e..2ce0152abab6 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -12,10 +12,10 @@ use zksync_types::{ AuxCommitments, BlobHash, CommitmentCommonInput, CommitmentInput, L1BatchAuxiliaryOutput, L1BatchCommitment, 
L1BatchCommitmentArtifacts, L1BatchCommitmentMode, }, + h256_to_u256, writes::{InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord}, L1BatchNumber, ProtocolVersionId, StorageKey, H256, U256, }; -use zksync_utils::h256_to_u256; use crate::{ metrics::{CommitmentStage, METRICS}, diff --git a/core/node/commitment_generator/src/utils.rs b/core/node/commitment_generator/src/utils.rs index d405a1256a29..cc44d7a03c71 100644 --- a/core/node/commitment_generator/src/utils.rs +++ b/core/node/commitment_generator/src/utils.rs @@ -21,13 +21,13 @@ use zksync_l1_contract_interface::i_executor::commit::kzg::ZK_SYNC_BYTES_PER_BLO use zksync_multivm::{interface::VmEvent, utils::get_used_bootloader_memory_bytes}; use zksync_system_constants::message_root::{AGG_TREE_HEIGHT_KEY, AGG_TREE_NODES_KEY}; use zksync_types::{ + address_to_u256, h256_to_u256, u256_to_h256, vm::VmVersion, web3::keccak256, zk_evm_types::{LogQuery, Timestamp}, AccountTreeId, L1BatchNumber, ProtocolVersionId, StorageKey, EVENT_WRITER_ADDRESS, H256, L2_MESSAGE_ROOT_ADDRESS, U256, }; -use zksync_utils::{address_to_u256, expand_memory_contents, h256_to_u256, u256_to_h256}; /// Encapsulates computations of commitment components. /// @@ -124,6 +124,15 @@ impl CommitmentComputer for RealCommitmentComputer { } } +fn expand_memory_contents(packed: &[(usize, U256)], memory_size_bytes: usize) -> Vec { + let mut result: Vec = vec![0; memory_size_bytes]; + + for (offset, value) in packed { + value.to_big_endian(&mut result[(offset * 32)..(offset + 1) * 32]); + } + + result +} fn to_log_query_1_3_3(log_query: LogQuery) -> LogQuery_1_3_3 { LogQuery_1_3_3 { timestamp: Timestamp_1_3_3(log_query.timestamp.0), diff --git a/core/node/consensus/Cargo.toml b/core/node/consensus/Cargo.toml index fdcc9089e339..120d355da9a8 100644 --- a/core/node/consensus/Cargo.toml +++ b/core/node/consensus/Cargo.toml @@ -30,7 +30,6 @@ zksync_state_keeper.workspace = true zksync_node_sync.workspace = true zksync_system_constants.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true zksync_web3_decl.workspace = true zksync_state.workspace = true zksync_vm_executor.workspace = true diff --git a/core/node/da_dispatcher/Cargo.toml b/core/node/da_dispatcher/Cargo.toml index 8a10d6813a5a..57d00cabaaa8 100644 --- a/core/node/da_dispatcher/Cargo.toml +++ b/core/node/da_dispatcher/Cargo.toml @@ -14,7 +14,6 @@ categories.workspace = true [dependencies] vise.workspace = true zksync_dal.workspace = true -zksync_utils.workspace = true zksync_config.workspace = true zksync_types.workspace = true zksync_da_client.workspace = true diff --git a/core/node/eth_sender/Cargo.toml b/core/node/eth_sender/Cargo.toml index a7aa88c3550e..a33536baa986 100644 --- a/core/node/eth_sender/Cargo.toml +++ b/core/node/eth_sender/Cargo.toml @@ -17,7 +17,6 @@ zksync_dal.workspace = true zksync_config.workspace = true zksync_contracts.workspace = true zksync_eth_client.workspace = true -zksync_utils.workspace = true zksync_l1_contract_interface.workspace = true zksync_object_store.workspace = true zksync_prover_interface.workspace = true diff --git a/core/node/eth_sender/src/eth_tx_manager.rs b/core/node/eth_sender/src/eth_tx_manager.rs index 7de91a3b7736..6992bea1007c 100644 --- a/core/node/eth_sender/src/eth_tx_manager.rs +++ b/core/node/eth_sender/src/eth_tx_manager.rs @@ -1,4 +1,7 @@ -use std::{sync::Arc, time::Duration}; +use std::{ + sync::Arc, + time::{Duration, SystemTime}, +}; use tokio::sync::watch; use zksync_config::configs::eth_sender::SenderConfig; @@ -9,7 +12,6 @@ use 
zksync_eth_client::{ use zksync_node_fee_model::l1_gas_price::TxParamsProvider; use zksync_shared_metrics::BlockL1Stage; use zksync_types::{eth_sender::EthTx, Address, L1BlockNumber, H256, U256}; -use zksync_utils::time::seconds_since_epoch; use super::{metrics::METRICS, EthSenderError}; use crate::{ @@ -501,9 +503,13 @@ impl EthTxManager { ); let tx_type_label = tx.tx_type.into(); METRICS.l1_gas_used[&tx_type_label].observe(gas_used.low_u128() as f64); - METRICS.l1_tx_mined_latency[&tx_type_label].observe(Duration::from_secs( - seconds_since_epoch() - tx.created_at_timestamp, - )); + + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); + let tx_latency = + duration_since_epoch.saturating_sub(Duration::from_secs(tx.created_at_timestamp)); + METRICS.l1_tx_mined_latency[&tx_type_label].observe(tx_latency); let sent_at_block = storage .eth_sender_dal() diff --git a/core/node/eth_sender/src/metrics.rs b/core/node/eth_sender/src/metrics.rs index 462fe3ed6e59..571837036045 100644 --- a/core/node/eth_sender/src/metrics.rs +++ b/core/node/eth_sender/src/metrics.rs @@ -1,12 +1,14 @@ //! Metrics for the Ethereum sender component. -use std::{fmt, time::Duration}; +use std::{ + fmt, + time::{Duration, SystemTime}, +}; use vise::{Buckets, Counter, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, Metrics}; use zksync_dal::{Connection, Core, CoreDal}; use zksync_shared_metrics::{BlockL1Stage, BlockStage, APP_METRICS}; use zksync_types::{aggregated_operations::AggregatedActionType, eth_sender::EthTx}; -use zksync_utils::time::seconds_since_epoch; use crate::abstract_l1_interface::{L1BlockNumbers, OperatorType}; @@ -143,10 +145,13 @@ impl EthSenderMetrics { return; } + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); for statistics in l1_batches_statistics { - APP_METRICS.block_latency[&stage].observe(Duration::from_secs( - seconds_since_epoch() - statistics.timestamp, - )); + let block_latency = + duration_since_epoch.saturating_sub(Duration::from_secs(statistics.timestamp)); + APP_METRICS.block_latency[&stage].observe(block_latency); APP_METRICS.processed_txs[&stage.into()] .inc_by(statistics.l2_tx_count as u64 + statistics.l1_tx_count as u64); APP_METRICS.processed_l1_txs[&stage.into()].inc_by(statistics.l1_tx_count as u64); diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index 82732342b407..03f51d5c5fc3 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -22,11 +22,12 @@ use zksync_types::{ protocol_upgrade::decode_set_chain_id_event, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, system_contracts::get_system_smart_contracts, + u256_to_h256, web3::{BlockNumber, FilterBuilder}, AccountTreeId, Address, Bloom, L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, H256, U256, }; -use zksync_utils::{bytecode::hash_bytecode, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use crate::utils::{ add_eth_token, get_deduped_log_queries, get_storage_logs, diff --git a/core/node/genesis/src/utils.rs b/core/node/genesis/src/utils.rs index 6042513537cd..d89d7475e84b 100644 --- a/core/node/genesis/src/utils.rs +++ b/core/node/genesis/src/utils.rs @@ -11,12 +11,13 @@ use zksync_system_constants::{DEFAULT_ERA_CHAIN_ID, ETHEREUM_ADDRESS}; use zksync_types::{ block::{DeployedContract, L1BatchTreeData}, commitment::L1BatchCommitment, - 
get_code_key, get_known_code_key, get_system_context_init_logs, + get_code_key, get_known_code_key, get_system_context_init_logs, h256_to_u256, tokens::{TokenInfo, TokenMetadata}, + u256_to_h256, zk_evm_types::{LogQuery, Timestamp}, AccountTreeId, L1BatchNumber, L2BlockNumber, L2ChainId, StorageKey, StorageLog, H256, }; -use zksync_utils::{be_words_to_bytes, bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use crate::GenesisError; @@ -132,7 +133,7 @@ pub(super) async fn insert_base_system_contracts_to_factory_deps( let factory_deps = [&contracts.bootloader, &contracts.default_aa] .into_iter() .chain(contracts.evm_emulator.as_ref()) - .map(|c| (c.hash, be_words_to_bytes(&c.code))) + .map(|c| (c.hash, c.code.clone())) .collect(); Ok(storage diff --git a/core/node/metadata_calculator/Cargo.toml b/core/node/metadata_calculator/Cargo.toml index 5b566c09ff68..0d0522939e91 100644 --- a/core/node/metadata_calculator/Cargo.toml +++ b/core/node/metadata_calculator/Cargo.toml @@ -19,7 +19,6 @@ zksync_types.workspace = true zksync_config.workspace = true zksync_storage.workspace = true zksync_shared_metrics.workspace = true -zksync_utils.workspace = true zksync_object_store.workspace = true vise.workspace = true diff --git a/core/node/metadata_calculator/src/api_server/mod.rs b/core/node/metadata_calculator/src/api_server/mod.rs index 4612d859a3dd..ced29310408e 100644 --- a/core/node/metadata_calculator/src/api_server/mod.rs +++ b/core/node/metadata_calculator/src/api_server/mod.rs @@ -18,8 +18,7 @@ use zksync_merkle_tree::{ unstable::{NodeKey, RawNode}, NoVersionError, ValueHash, }; -use zksync_types::{web3, L1BatchNumber, H256, U256}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, web3, L1BatchNumber, H256, U256}; use self::metrics::{MerkleTreeApiMethod, API_METRICS}; use crate::{AsyncTreeReader, LazyAsyncTreeReader, MerkleTreeInfo}; diff --git a/core/node/metadata_calculator/src/metrics.rs b/core/node/metadata_calculator/src/metrics.rs index 7eb49b95afd4..c6d7094ef839 100644 --- a/core/node/metadata_calculator/src/metrics.rs +++ b/core/node/metadata_calculator/src/metrics.rs @@ -1,6 +1,6 @@ //! Metrics for `MetadataCalculator`. 
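Note: the `be_words_to_bytes` / `bytes_to_be_words` round-trips dropped in the genesis hunk above and in the node-sync hunks further down suggest that `SystemContractCode::code` now stores the raw bytecode bytes instead of 32-byte big-endian words. A hedged sketch of the resulting construction, assuming the field type is now `Vec<u8>` and that `SystemContractCode` is still the struct provided by `zksync_contracts`:

    use zksync_contracts::SystemContractCode;
    use zksync_types::H256;

    fn wrap_fetched_bytecode(hash: H256, bytes: Vec<u8>) -> SystemContractCode {
        // Previously the bytes had to be packed: code: bytes_to_be_words(bytes)
        SystemContractCode { code: bytes, hash }
    }
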
-use std::time::{Duration, Instant}; +use std::time::{Duration, Instant, SystemTime}; use vise::{ Buckets, DurationAsSecs, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, Info, @@ -9,7 +9,6 @@ use vise::{ use zksync_config::configs::database::MerkleTreeMode; use zksync_shared_metrics::{BlockStage, APP_METRICS}; use zksync_types::block::L1BatchHeader; -use zksync_utils::time::seconds_since_epoch; use super::{MetadataCalculator, MetadataCalculatorConfig}; @@ -187,6 +186,11 @@ impl MetadataCalculator { total_logs: usize, start: Instant, ) { + let (Some(first_header), Some(last_header)) = (batch_headers.first(), batch_headers.last()) + else { + return; + }; + let elapsed = start.elapsed(); METRICS.update_tree_latency.observe(elapsed); if total_logs > 0 { @@ -205,17 +209,20 @@ impl MetadataCalculator { METRICS.log_batch.observe(total_logs); METRICS.blocks_batch.observe(batch_headers.len()); - let first_batch_number = batch_headers.first().unwrap().number.0; - let last_batch_number = batch_headers.last().unwrap().number.0; + let first_batch_number = first_header.number.0; + let last_batch_number = last_header.number.0; tracing::info!( "L1 batches #{:?} processed in tree", first_batch_number..=last_batch_number ); APP_METRICS.block_number[&BlockStage::Tree].set(last_batch_number.into()); + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); let latency = - seconds_since_epoch().saturating_sub(batch_headers.first().unwrap().timestamp); - APP_METRICS.block_latency[&BlockStage::Tree].observe(Duration::from_secs(latency)); + duration_since_epoch.saturating_sub(Duration::from_secs(first_header.timestamp)); + APP_METRICS.block_latency[&BlockStage::Tree].observe(latency); } } diff --git a/core/node/metadata_calculator/src/tests.rs b/core/node/metadata_calculator/src/tests.rs index 1c003c4ecf78..9717ce5682ce 100644 --- a/core/node/metadata_calculator/src/tests.rs +++ b/core/node/metadata_calculator/src/tests.rs @@ -23,7 +23,6 @@ use zksync_types::{ block::{L1BatchHeader, L1BatchTreeData}, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, StorageKey, StorageLog, H256, }; -use zksync_utils::u32_to_h256; use super::{ helpers::L1BatchWithLogs, GenericAsyncTree, MetadataCalculator, MetadataCalculatorConfig, @@ -904,9 +903,9 @@ pub(crate) fn gen_storage_logs( let proof_keys = accounts.iter().flat_map(|&account| { account_keys .clone() - .map(move |i| StorageKey::new(account, u32_to_h256(i))) + .map(move |i| StorageKey::new(account, H256::from_low_u64_be(i.into()))) }); - let proof_values = indices.map(u32_to_h256); + let proof_values = indices.map(|i| H256::from_low_u64_be(i.into())); let logs: Vec<_> = proof_keys .zip(proof_values) diff --git a/core/node/node_sync/src/client.rs b/core/node/node_sync/src/client.rs index ee89db10ddd1..d56d8ebc2631 100644 --- a/core/node/node_sync/src/client.rs +++ b/core/node/node_sync/src/client.rs @@ -8,7 +8,7 @@ use zksync_health_check::{CheckHealth, Health, HealthStatus}; use zksync_system_constants::ACCOUNT_CODE_STORAGE_ADDRESS; use zksync_types::{ api::{self, en}, - get_code_key, Address, L2BlockNumber, ProtocolVersionId, H256, U64, + get_code_key, h256_to_u256, Address, L2BlockNumber, ProtocolVersionId, H256, U64, }; use zksync_web3_decl::{ client::{DynClient, L2}, @@ -81,7 +81,7 @@ impl MainNodeClient for Box> { let code_hash = self .get_storage_at( ACCOUNT_CODE_STORAGE_ADDRESS, - zksync_utils::h256_to_u256(*code_key.key()), + h256_to_u256(*code_key.key()), Some(GENESIS_BLOCK), ) 
.rpc_context("get_storage_at") diff --git a/core/node/node_sync/src/external_io.rs b/core/node/node_sync/src/external_io.rs index 0f5f4d6253fa..d3d908cfc169 100644 --- a/core/node/node_sync/src/external_io.rs +++ b/core/node/node_sync/src/external_io.rs @@ -20,7 +20,6 @@ use zksync_types::{ protocol_version::{ProtocolSemanticVersion, VersionPatch}, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, }; -use zksync_utils::bytes_to_be_words; use zksync_vm_executor::storage::L1BatchParamsProvider; use super::{ @@ -75,7 +74,7 @@ impl ExternalIO { Ok(match bytecode { Some(bytecode) => SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }, None => { @@ -98,7 +97,7 @@ impl ExternalIO { ) .await?; SystemContractCode { - code: bytes_to_be_words(contract_bytecode), + code: contract_bytecode, hash, } } diff --git a/core/node/node_sync/src/genesis.rs b/core/node/node_sync/src/genesis.rs index c5d4869175df..7401bdd9c9d4 100644 --- a/core/node/node_sync/src/genesis.rs +++ b/core/node/node_sync/src/genesis.rs @@ -110,20 +110,17 @@ async fn fetch_base_system_contracts( .fetch_system_contract_by_hash(hash) .await? .context("EVM emulator bytecode is missing on main node")?; - Some(SystemContractCode { - code: zksync_utils::bytes_to_be_words(bytes), - hash, - }) + Some(SystemContractCode { code: bytes, hash }) } else { None }; Ok(BaseSystemContracts { bootloader: SystemContractCode { - code: zksync_utils::bytes_to_be_words(bootloader_bytecode), + code: bootloader_bytecode, hash: contract_hashes.bootloader, }, default_aa: SystemContractCode { - code: zksync_utils::bytes_to_be_words(default_aa_bytecode), + code: default_aa_bytecode, hash: contract_hashes.default_aa, }, evm_emulator, diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index e2ddc972a2f5..1bcda394a674 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -18,7 +18,6 @@ zksync_object_store.workspace = true zksync_prover_interface.workspace = true zksync_types.workspace = true zksync_vm_executor.workspace = true -zksync_utils.workspace = true anyhow.workspace = true axum.workspace = true tokio.workspace = true diff --git a/core/node/state_keeper/src/executor/tests/tester.rs b/core/node/state_keeper/src/executor/tests/tester.rs index 800bf398938d..247703c866e4 100644 --- a/core/node/state_keeper/src/executor/tests/tester.rs +++ b/core/node/state_keeper/src/executor/tests/tester.rs @@ -30,12 +30,12 @@ use zksync_types::{ protocol_version::ProtocolSemanticVersion, snapshots::{SnapshotRecoveryStatus, SnapshotStorageLog}, system_contracts::get_system_smart_contracts, + u256_to_h256, utils::storage_key_for_standard_token_balance, vm::FastVmMode, AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, Transaction, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use zksync_vm_executor::batch::{MainBatchExecutorFactory, TraceCalls}; use super::{read_storage_factory::RocksdbStorageFactory, StorageType}; diff --git a/core/node/state_keeper/src/io/mempool.rs b/core/node/state_keeper/src/io/mempool.rs index 370d46fd544c..991ecee699c3 100644 --- a/core/node/state_keeper/src/io/mempool.rs +++ b/core/node/state_keeper/src/io/mempool.rs @@ -20,8 +20,6 @@ use zksync_types::{ utils::display_timestamp, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, U256, }; -// TODO (SMA-1206): use seconds instead of 
milliseconds. -use zksync_utils::time::millis_since_epoch; use zksync_vm_executor::storage::L1BatchParamsProvider; use crate::{ @@ -36,6 +34,7 @@ use crate::{ IoSealCriteria, L2BlockMaxPayloadSizeSealer, TimeoutSealer, UnexecutableReason, }, updates::UpdatesManager, + utils::millis_since_epoch, MempoolGuard, }; @@ -531,9 +530,9 @@ impl MempoolIO { #[cfg(test)] mod tests { use tokio::time::timeout_at; - use zksync_utils::time::seconds_since_epoch; use super::*; + use crate::tests::seconds_since_epoch; // This test defensively uses large deadlines in order to account for tests running in parallel etc. #[tokio::test] diff --git a/core/node/state_keeper/src/io/persistence.rs b/core/node/state_keeper/src/io/persistence.rs index 8bfd812c8a1f..d8fd99bfc95d 100644 --- a/core/node/state_keeper/src/io/persistence.rs +++ b/core/node/state_keeper/src/io/persistence.rs @@ -7,8 +7,7 @@ use async_trait::async_trait; use tokio::sync::{mpsc, oneshot}; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_shared_metrics::{BlockStage, APP_METRICS}; -use zksync_types::{writes::TreeWrite, Address, ProtocolVersionId}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, writes::TreeWrite, Address, ProtocolVersionId}; use crate::{ io::{ @@ -387,10 +386,9 @@ mod tests { use zksync_multivm::interface::{FinishedL1Batch, VmExecutionMetrics}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_types::{ - api::TransactionStatus, block::BlockGasCount, writes::StateDiffRecord, L1BatchNumber, - L2BlockNumber, StorageLogKind, H256, U256, + api::TransactionStatus, block::BlockGasCount, h256_to_u256, writes::StateDiffRecord, + L1BatchNumber, L2BlockNumber, StorageLogKind, H256, U256, }; - use zksync_utils::h256_to_u256; use super::*; use crate::{ diff --git a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs index 53871c54a19f..a6356a838602 100644 --- a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs +++ b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs @@ -5,11 +5,10 @@ use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::interface::VmEvent; use zksync_system_constants::{CONTRACT_DEPLOYER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS}; use zksync_types::{ - ethabi, + ethabi, h256_to_address, tokens::{TokenInfo, TokenMetadata}, Address, L2BlockNumber, H256, }; -use zksync_utils::h256_to_account_address; use crate::{ io::seal_logic::SealStrategy, @@ -28,9 +27,9 @@ fn extract_added_tokens( event.address == CONTRACT_DEPLOYER_ADDRESS && event.indexed_topics.len() == 4 && event.indexed_topics[0] == VmEvent::DEPLOY_EVENT_SIGNATURE - && h256_to_account_address(&event.indexed_topics[1]) == l2_token_deployer_addr + && h256_to_address(&event.indexed_topics[1]) == l2_token_deployer_addr }) - .map(|event| h256_to_account_address(&event.indexed_topics[3])); + .map(|event| h256_to_address(&event.indexed_topics[3])); extract_added_token_info_from_addresses(all_generated_events, deployed_tokens) } @@ -73,7 +72,7 @@ fn extract_added_token_info_from_addresses( || event.indexed_topics[0] == *BRIDGE_INITIALIZATION_SIGNATURE_OLD) }) .map(|event| { - let l1_token_address = h256_to_account_address(&event.indexed_topics[1]); + let l1_token_address = h256_to_address(&event.indexed_topics[1]); let mut dec_ev = ethabi::decode( &[ ethabi::ParamType::String, @@ -467,11 +466,11 @@ mod tests { use zksync_types::{ block::L2BlockHeader, commitment::PubdataParams, + h256_to_u256, 
l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, AccountTreeId, Address, L1BatchNumber, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, }; - use zksync_utils::h256_to_u256; use super::*; use crate::updates::L2BlockUpdates; diff --git a/core/node/state_keeper/src/io/seal_logic/mod.rs b/core/node/state_keeper/src/io/seal_logic/mod.rs index 7f05bda7a6f5..419413e127d3 100644 --- a/core/node/state_keeper/src/io/seal_logic/mod.rs +++ b/core/node/state_keeper/src/io/seal_logic/mod.rs @@ -22,11 +22,11 @@ use zksync_types::{ helpers::unix_timestamp_ms, l2_to_l1_log::UserL2ToL1Log, tx::IncludedTxLocation, + u256_to_h256, utils::display_timestamp, Address, BloomInput, ExecuteTransactionCommon, ProtocolVersionId, StorageKey, StorageLog, Transaction, H256, }; -use zksync_utils::u256_to_h256; use crate::{ io::seal_logic::l2_block_seal_subtasks::L2BlockSealProcess, diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index 7196236475df..4ea3460e6e30 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -20,17 +20,14 @@ use zksync_types::{ AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; -use zksync_utils::{ - bytecode::{hash_bytecode, hash_evm_bytecode}, - time::seconds_since_epoch, -}; +use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode}; use self::tester::Tester; use crate::{ io::{seal_logic::l2_block_seal_subtasks::L2BlockSealProcess, StateKeeperIO}, mempool_actor::l2_tx_filter, testonly::BASE_SYSTEM_CONTRACTS, - tests::{create_execution_result, create_transaction, Query}, + tests::{create_execution_result, create_transaction, seconds_since_epoch, Query}, updates::{L2BlockSealCommand, L2BlockUpdates, UpdatesManager}, StateKeeperOutputHandler, StateKeeperPersistence, }; diff --git a/core/node/state_keeper/src/mempool_actor.rs b/core/node/state_keeper/src/mempool_actor.rs index 8e9d674f8787..fea1fcf89291 100644 --- a/core/node/state_keeper/src/mempool_actor.rs +++ b/core/node/state_keeper/src/mempool_actor.rs @@ -171,7 +171,9 @@ async fn get_transaction_nonces( Ok(nonce_values .into_iter() .map(|(nonce_key, nonce_value)| { - let nonce = Nonce(zksync_utils::h256_to_u32(nonce_value)); + // `unwrap()` is safe by construction. 
+ let be_u32_bytes: [u8; 4] = nonce_value[28..].try_into().unwrap(); + let nonce = Nonce(u32::from_be_bytes(be_u32_bytes)); (address_by_nonce_key[&nonce_key], nonce) }) .collect()) @@ -183,8 +185,9 @@ mod tests { use zksync_node_fee_model::MockBatchFeeParamsProvider; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::create_l2_transaction; - use zksync_types::{L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, H256}; - use zksync_utils::u256_to_h256; + use zksync_types::{ + u256_to_h256, L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, H256, + }; use super::*; diff --git a/core/node/state_keeper/src/seal_criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/mod.rs index c10b01e7e73d..4c6f56a6f5b7 100644 --- a/core/node/state_keeper/src/seal_criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/mod.rs @@ -20,18 +20,17 @@ use zksync_multivm::{ use zksync_types::{ block::BlockGasCount, utils::display_timestamp, ProtocolVersionId, Transaction, }; -use zksync_utils::time::millis_since; - -mod conditional_sealer; -pub(super) mod criteria; pub use self::conditional_sealer::{ConditionalSealer, NoopSealer, SequencerSealer}; -use super::{ +use crate::{ metrics::AGGREGATION_METRICS, updates::UpdatesManager, - utils::{gas_count_from_tx_and_metrics, gas_count_from_writes}, + utils::{gas_count_from_tx_and_metrics, gas_count_from_writes, millis_since}, }; +mod conditional_sealer; +pub(super) mod criteria; + fn halt_as_metric_label(halt: &Halt) -> &'static str { match halt { Halt::ValidationFailed(_) => "ValidationFailed", @@ -278,10 +277,10 @@ impl L2BlockMaxPayloadSizeSealer { #[cfg(test)] mod tests { - use zksync_utils::time::seconds_since_epoch; - use super::*; - use crate::tests::{create_execution_result, create_transaction, create_updates_manager}; + use crate::tests::{ + create_execution_result, create_transaction, create_updates_manager, seconds_since_epoch, + }; fn apply_tx_to_manager(tx: Transaction, manager: &mut UpdatesManager) { manager.extend_from_executed_transaction( diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index b0f641ccbc1a..023613cda61d 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -14,11 +14,11 @@ use zksync_multivm::interface::{ use zksync_state::OwnedStorage; use zksync_test_account::Account; use zksync_types::{ - commitment::PubdataParams, fee::Fee, utils::storage_key_for_standard_token_balance, - AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, PriorityOpId, StorageLog, - Transaction, L2_BASE_TOKEN_ADDRESS, SYSTEM_CONTEXT_MINIMAL_BASE_FEE, U256, + commitment::PubdataParams, fee::Fee, u256_to_h256, + utils::storage_key_for_standard_token_balance, AccountTreeId, Address, Execute, L1BatchNumber, + L2BlockNumber, PriorityOpId, StorageLog, Transaction, L2_BASE_TOKEN_ADDRESS, + SYSTEM_CONTEXT_MINIMAL_BASE_FEE, U256, }; -use zksync_utils::u256_to_h256; pub mod test_batch_executor; diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index 28e2f9886b49..ca078354c896 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -3,7 +3,7 @@ use std::{ atomic::{AtomicBool, AtomicU64, Ordering}, Arc, }, - time::Instant, + time::{Instant, SystemTime, UNIX_EPOCH}, }; use tokio::sync::watch; @@ -20,11 +20,10 @@ use zksync_types::{ aggregated_operations::AggregatedActionType, block::{BlockGasCount, 
L2BlockExecutionData, L2BlockHasher}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, - AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, StorageKey, - StorageLog, StorageLogKind, StorageLogWithPreviousValue, Transaction, H256, U256, - ZKPORTER_IS_AVAILABLE, + u256_to_h256, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, + ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, + Transaction, H256, U256, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::u256_to_h256; use crate::{ io::PendingBatchData, @@ -46,6 +45,13 @@ use crate::{ ZkSyncStateKeeper, }; +pub(crate) fn seconds_since_epoch() -> u64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Incorrect system time") + .as_secs() +} + /// Creates a mock `PendingBatchData` object containing the provided sequence of L2 blocks. pub(crate) fn pending_batch_data(pending_l2_blocks: Vec) -> PendingBatchData { PendingBatchData { diff --git a/core/node/state_keeper/src/utils.rs b/core/node/state_keeper/src/utils.rs index 4240ad306251..320dd49583ed 100644 --- a/core/node/state_keeper/src/utils.rs +++ b/core/node/state_keeper/src/utils.rs @@ -1,3 +1,5 @@ +use std::time::{SystemTime, UNIX_EPOCH}; + use zksync_multivm::interface::{DeduplicatedWritesMetrics, VmExecutionMetrics}; use zksync_types::{ aggregated_operations::AggregatedActionType, block::BlockGasCount, ExecuteTransactionCommon, @@ -86,3 +88,15 @@ pub(super) fn gas_count_from_writes( execute: 0, } } + +// TODO (SMA-1206): use seconds instead of milliseconds. +pub(super) fn millis_since_epoch() -> u128 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Incorrect system time") + .as_millis() +} + +pub(super) fn millis_since(since: u64) -> u64 { + (millis_since_epoch() - since as u128 * 1000) as u64 +} diff --git a/core/node/test_utils/Cargo.toml b/core/node/test_utils/Cargo.toml index 6df100c51a7d..fd657c7d82c0 100644 --- a/core/node/test_utils/Cargo.toml +++ b/core/node/test_utils/Cargo.toml @@ -17,4 +17,3 @@ zksync_contracts.workspace = true zksync_merkle_tree.workspace = true zksync_system_constants.workspace = true zksync_vm_interface.workspace = true -zksync_utils.workspace = true diff --git a/core/node/test_utils/src/lib.rs b/core/node/test_utils/src/lib.rs index 2b446fff12c5..9a02c18cd235 100644 --- a/core/node/test_utils/src/lib.rs +++ b/core/node/test_utils/src/lib.rs @@ -226,7 +226,7 @@ impl Snapshot { factory_deps: [&contracts.bootloader, &contracts.default_aa] .into_iter() .chain(contracts.evm_emulator.as_ref()) - .map(|c| (c.hash, zksync_utils::be_words_to_bytes(&c.code))) + .map(|c| (c.hash, c.code.clone())) .collect(), storage_logs, } diff --git a/core/node/vm_runner/src/impls/bwip.rs b/core/node/vm_runner/src/impls/bwip.rs index a2cf126f5499..5d63d09b5caf 100644 --- a/core/node/vm_runner/src/impls/bwip.rs +++ b/core/node/vm_runner/src/impls/bwip.rs @@ -1,4 +1,7 @@ -use std::{collections::HashSet, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use anyhow::anyhow; use async_trait::async_trait; @@ -8,10 +11,9 @@ use zksync_object_store::ObjectStore; use zksync_prover_interface::inputs::VMRunWitnessInputData; use zksync_state::OwnedStorage; use zksync_types::{ - block::StorageOracleInfo, witness_block_state::WitnessStorageState, L1BatchNumber, L2ChainId, - H256, + block::StorageOracleInfo, h256_to_u256, u256_to_h256, witness_block_state::WitnessStorageState, + L1BatchNumber, L2ChainId, H256, }; -use zksync_utils::{bytes_to_chunks, 
h256_to_u256, u256_to_h256}; use zksync_vm_interface::{executor::BatchExecutorFactory, L1BatchEnv, L2BlockEnv, SystemEnv}; use crate::{ @@ -224,7 +226,6 @@ async fn get_updates_manager_witness_input_data( .get_sealed_factory_dep(default_aa) .await? .ok_or_else(|| anyhow!("Default account bytecode should exist"))?; - let account_bytecode = bytes_to_chunks(&account_bytecode_bytes); let used_contract_hashes = &output.batch.final_execution_state.used_contract_hashes; let hashes: HashSet = used_contract_hashes @@ -238,7 +239,7 @@ async fn get_updates_manager_witness_input_data( .get_factory_deps(&hashes) .await; if used_contract_hashes.contains(&account_code_hash) { - used_bytecodes.insert(account_code_hash, account_bytecode); + used_bytecodes.insert(account_code_hash, account_bytecode_bytes); } let evm_emulator_code_hash = if let Some(evm_emulator) = evm_emulator { @@ -249,7 +250,6 @@ async fn get_updates_manager_witness_input_data( .get_sealed_factory_dep(evm_emulator) .await? .ok_or_else(|| anyhow!("EVM emulator bytecode should exist"))?; - let evm_emulator_bytecode = bytes_to_chunks(&evm_emulator_bytecode); used_bytecodes.insert(evm_emulator_code_hash, evm_emulator_bytecode); } Some(evm_emulator_code_hash) @@ -266,7 +266,10 @@ async fn get_updates_manager_witness_input_data( Ok(VMRunWitnessInputData { l1_batch_number, - used_bytecodes, + used_bytecodes: used_bytecodes + .into_iter() + .map(|(hash, code)| (hash, bytes_to_chunks(&code))) + .collect(), initial_heap_content, protocol_version: system_env.version, bootloader_code, @@ -278,6 +281,13 @@ async fn get_updates_manager_witness_input_data( }) } +fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> { + bytes + .chunks(32) + .map(|chunk| chunk.try_into().unwrap()) + .collect() +} + #[tracing::instrument(skip_all)] async fn assert_database_witness_input_data( connection: &mut Connection<'_, Core>, @@ -305,7 +315,6 @@ async fn assert_database_witness_input_data( .await .expect("Failed fetching default account bytecode from DB") .expect("Default account bytecode should exist"); - let account_bytecode = bytes_to_chunks(&account_bytecode_bytes); let hashes: HashSet = block_header .used_contract_hashes @@ -322,7 +331,7 @@ async fn assert_database_witness_input_data( .used_contract_hashes .contains(&account_code_hash) { - used_bytecodes.insert(account_code_hash, account_bytecode); + used_bytecodes.insert(account_code_hash, account_bytecode_bytes); } assert_eq!( @@ -331,6 +340,10 @@ async fn assert_database_witness_input_data( "{} factory deps are not found in DB", hashes.len() - used_bytecodes.len() ); + let used_bytecodes: HashMap<_, _> = used_bytecodes + .into_iter() + .map(|(hash, code)| (hash, bytes_to_chunks(&code))) + .collect(); let StorageOracleInfo { storage_refunds, diff --git a/core/node/vm_runner/src/tests/mod.rs b/core/node/vm_runner/src/tests/mod.rs index a3438d5a4e11..d56c70e5808d 100644 --- a/core/node/vm_runner/src/tests/mod.rs +++ b/core/node/vm_runner/src/tests/mod.rs @@ -13,13 +13,14 @@ use zksync_test_account::Account; use zksync_types::{ block::{L1BatchHeader, L2BlockHasher}, fee::Fee, - get_intrinsic_constants, + get_intrinsic_constants, h256_to_u256, l2::L2Tx, + u256_to_h256, utils::storage_key_for_standard_token_balance, AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageValue, H160, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_utils::bytecode::hash_bytecode; use 
zksync_vm_interface::{ tracer::ValidationTraces, L1BatchEnv, L2BlockEnv, SystemEnv, TransactionExecutionMetrics, }; diff --git a/core/tests/test_account/src/lib.rs b/core/tests/test_account/src/lib.rs index cfb539c0e0f7..b8c79923a4e8 100644 --- a/core/tests/test_account/src/lib.rs +++ b/core/tests/test_account/src/lib.rs @@ -8,10 +8,11 @@ use zksync_system_constants::{ REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, }; use zksync_types::{ - abi, fee::Fee, l2::L2Tx, utils::deployed_address_create, Address, Execute, K256PrivateKey, - L2ChainId, Nonce, Transaction, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, + abi, address_to_u256, fee::Fee, h256_to_u256, l2::L2Tx, utils::deployed_address_create, + Address, Execute, K256PrivateKey, L2ChainId, Nonce, Transaction, H256, + PRIORITY_OPERATION_L2_TX_TYPE, U256, }; -use zksync_utils::{address_to_u256, bytecode::hash_bytecode, h256_to_u256}; +use zksync_utils::bytecode::hash_bytecode; pub const L1_TEST_GAS_PER_PUBDATA_BYTE: u32 = 800; const BASE_FEE: u64 = 2_000_000_000; diff --git a/core/tests/vm-benchmark/src/vm.rs b/core/tests/vm-benchmark/src/vm.rs index bf969e0de5c0..e082b9c24da2 100644 --- a/core/tests/vm-benchmark/src/vm.rs +++ b/core/tests/vm-benchmark/src/vm.rs @@ -14,7 +14,7 @@ use zksync_multivm::{ zk_evm_latest::ethereum_types::{Address, U256}, }; use zksync_types::{ - block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms, + block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms, u256_to_h256, utils::storage_key_for_eth_balance, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, }; @@ -29,7 +29,7 @@ static STORAGE: Lazy = Lazy::new(|| { // Give `PRIVATE_KEY` some money let balance = U256::from(10u32).pow(U256::from(32)); //10^32 wei let key = storage_key_for_eth_balance(&PRIVATE_KEY.address()); - storage.set_value(key, zksync_utils::u256_to_h256(balance)); + storage.set_value(key, u256_to_h256(balance)); storage }); diff --git a/prover/Cargo.lock b/prover/Cargo.lock index f119d4bd1951..3250c99deda6 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -8030,7 +8030,6 @@ dependencies = [ "sha2 0.10.8", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -8592,7 +8591,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -8632,14 +8630,10 @@ name = "zksync_utils" version = "0.1.0" dependencies = [ "anyhow", - "bigdecimal", "const-decoder 0.4.0", "futures 0.3.30", - "hex", - "num", "once_cell", "reqwest 0.12.5", - "serde", "serde_json", "thiserror", "tokio", diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index a9089719714d..a582fff958f5 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -6895,7 +6895,6 @@ dependencies = [ "sha2", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -6970,7 +6969,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -7010,14 +7008,10 @@ name = "zksync_utils" version = "0.1.0" dependencies = [ "anyhow", - "bigdecimal", "const-decoder", "futures", - "hex", - "num", "once_cell", "reqwest 0.12.8", - "serde", "serde_json", "thiserror", "tokio", From cc4cd8d139c34e82cae5b3627961f53f280997f1 Mon Sep 17 00:00:00 2001 From: D025 Date: Mon, 11 Nov 2024 13:22:33 +0200 Subject: [PATCH 11/23] ci: migrate release workflows to new reusable workflows (#3198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Migrate build core and prover 
release workflows to new re-usable workflows ## Why ❔ Improve CI ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- .github/workflows/release-test-stage.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 18708420dab0..36a2b494c242 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -61,10 +61,11 @@ jobs: build-push-core-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/build-core-template.yml + uses: ./.github/workflows/new-build-core-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} + action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -84,10 +85,11 @@ jobs: build-push-contract-verifier: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/build-contract-verifier-template.yml + uses: ./.github/workflows/new-build-contract-verifier-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} + action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -95,12 +97,13 @@ jobs: build-push-prover-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/build-prover-template.yml + uses: ./.github/workflows/new-build-prover-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" + action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -108,13 +111,14 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push prover images with avx512 instructions needs: [setup, changed_files] - uses: ./.github/workflows/build-witness-generator-template.yml + uses: ./.github/workflows/new-build-witness-generator-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl " + action: push secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} From 8552d9cd35e6f70a7cd9310847087fdaf97180d5 Mon Sep 17 00:00:00 2001 From: Jacob Lindahl Date: Mon, 11 Nov 2024 07:24:12 -0600 Subject: [PATCH 12/23] chore: Refine loadtest configurations (#2649) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ This PR accomplishes two things: 1. 
Improves the precision with which the loadtest contract can be configured by splitting writes into two categories—initial and repeated—since the cost associated with each is different. 2. Introduces a set of standards which can be used to determine appropriate loadtest configurations. SQL queries are provided to assist in the calculations. ## Why ❔ Currently, the loadtests do not produce activity that closely emulates mainnet activity. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. (See https://github.com/matter-labs/zksync-era-workflows/pull/229) - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --------- Co-authored-by: Roman Brodetski --- bin/run_loadtest_from_github_actions | 11 ++-- core/lib/contracts/src/test_contracts.rs | 12 +++-- .../src/versions/testonly/rollbacks.rs | 6 ++- .../src/versions/vm_latest/tests/rollbacks.rs | 2 +- .../api_server/src/tx_sender/tests/call.rs | 3 +- .../src/tx_sender/tests/gas_estimation.rs | 2 +- .../api_server/src/tx_sender/tests/mod.rs | 3 +- .../state_keeper/src/executor/tests/mod.rs | 4 +- .../state_keeper/src/executor/tests/tester.rs | 11 ++-- core/tests/loadnext/Cargo.toml | 1 + core/tests/loadnext/README.md | 28 ++++++----- .../src/account/tx_command_executor.rs | 3 +- core/tests/vm-benchmark/src/transaction.rs | 21 ++++---- .../how_to_calculate_loadtest_profiles.md | 15 ++++++ .../contracts/loadnext/loadnext_contract.sol | 41 ++++++++++++--- .../loadnext/query_event_metrics.sql | 19 +++++++ .../loadnext/query_execution_info_metrics.sql | 20 ++++++++ .../query_max_transactions_in_window.sql | 23 +++++++++ .../loadnext/query_read_metrics_basic.sql | 39 +++++++++++++++ .../loadnext/query_write_metrics.sql | 50 +++++++++++++++++++ 20 files changed, 262 insertions(+), 52 deletions(-) create mode 100644 etc/contracts-test-data/contracts/loadnext/how_to_calculate_loadtest_profiles.md create mode 100644 etc/contracts-test-data/contracts/loadnext/query_event_metrics.sql create mode 100644 etc/contracts-test-data/contracts/loadnext/query_execution_info_metrics.sql create mode 100644 etc/contracts-test-data/contracts/loadnext/query_max_transactions_in_window.sql create mode 100644 etc/contracts-test-data/contracts/loadnext/query_read_metrics_basic.sql create mode 100644 etc/contracts-test-data/contracts/loadnext/query_write_metrics.sql diff --git a/bin/run_loadtest_from_github_actions b/bin/run_loadtest_from_github_actions index f784ddd3180d..149988d63d8f 100755 --- a/bin/run_loadtest_from_github_actions +++ b/bin/run_loadtest_from_github_actions @@ -11,11 +11,12 @@ export TRANSACTION_WEIGHTS_WITHDRAWAL=${weights[3]} read -ra execution_params <<<"$CONTRACT_EXECUTION_PARAMS" #reading $CONTRACT_EXECUTION_PARAMS as an array as tokens separated by IFS export CONTRACT_EXECUTION_PARAMS_READS=${execution_params[0]} -export CONTRACT_EXECUTION_PARAMS_WRITES=${execution_params[1]} -export CONTRACT_EXECUTION_PARAMS_EVENTS=${execution_params[2]} -export CONTRACT_EXECUTION_PARAMS_HASHES=${execution_params[3]} -export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[4]} -export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[5]} +export CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=${execution_params[1]} +export CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=${execution_params[2]} +export CONTRACT_EXECUTION_PARAMS_EVENTS=${execution_params[3]} +export 
CONTRACT_EXECUTION_PARAMS_HASHES=${execution_params[4]} +export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[5]} +export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[6]} # Run the test cargo run --bin loadnext diff --git a/core/lib/contracts/src/test_contracts.rs b/core/lib/contracts/src/test_contracts.rs index eab1587f8335..a670d930f049 100644 --- a/core/lib/contracts/src/test_contracts.rs +++ b/core/lib/contracts/src/test_contracts.rs @@ -6,7 +6,8 @@ use crate::get_loadnext_contract; #[derive(Debug, Clone, Deserialize)] pub struct LoadnextContractExecutionParams { pub reads: usize, - pub writes: usize, + pub initial_writes: usize, + pub repeated_writes: usize, pub events: usize, pub hashes: usize, pub recursive_calls: usize, @@ -21,7 +22,8 @@ impl LoadnextContractExecutionParams { pub fn empty() -> Self { Self { reads: 0, - writes: 0, + initial_writes: 0, + repeated_writes: 0, events: 0, hashes: 0, recursive_calls: 0, @@ -34,7 +36,8 @@ impl Default for LoadnextContractExecutionParams { fn default() -> Self { Self { reads: 10, - writes: 10, + initial_writes: 10, + repeated_writes: 10, events: 10, hashes: 10, recursive_calls: 1, @@ -50,7 +53,8 @@ impl LoadnextContractExecutionParams { let params = vec![ Token::Uint(U256::from(self.reads)), - Token::Uint(U256::from(self.writes)), + Token::Uint(U256::from(self.initial_writes)), + Token::Uint(U256::from(self.repeated_writes)), Token::Uint(U256::from(self.hashes)), Token::Uint(U256::from(self.events)), Token::Uint(U256::from(self.recursive_calls)), diff --git a/core/lib/multivm/src/versions/testonly/rollbacks.rs b/core/lib/multivm/src/versions/testonly/rollbacks.rs index cab3427899ea..08f0136717c4 100644 --- a/core/lib/multivm/src/versions/testonly/rollbacks.rs +++ b/core/lib/multivm/src/versions/testonly/rollbacks.rs @@ -105,7 +105,8 @@ pub(crate) fn test_vm_loadnext_rollbacks() { contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, events: 100, hashes: 500, recursive_calls: 10, @@ -123,7 +124,8 @@ pub(crate) fn test_vm_loadnext_rollbacks() { contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, events: 100, hashes: 500, recursive_calls: 10, diff --git a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs index de674498427d..bb20670e5a6e 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs @@ -87,7 +87,7 @@ fn test_layered_rollback() { let loadnext_transaction = account.get_loadnext_transaction( address, LoadnextContractExecutionParams { - writes: 1, + initial_writes: 1, recursive_calls: 20, ..LoadnextContractExecutionParams::empty() }, diff --git a/core/node/api_server/src/tx_sender/tests/call.rs b/core/node/api_server/src/tx_sender/tests/call.rs index e43f55b2b9af..08571790e8eb 100644 --- a/core/node/api_server/src/tx_sender/tests/call.rs +++ b/core/node/api_server/src/tx_sender/tests/call.rs @@ -238,7 +238,8 @@ async fn eth_call_with_load_test_transactions() { }, LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, ..LoadnextContractExecutionParams::empty() }, ]; diff --git a/core/node/api_server/src/tx_sender/tests/gas_estimation.rs b/core/node/api_server/src/tx_sender/tests/gas_estimation.rs index 
7db1b8339314..5e0c67477ffe 100644 --- a/core/node/api_server/src/tx_sender/tests/gas_estimation.rs +++ b/core/node/api_server/src/tx_sender/tests/gas_estimation.rs @@ -116,7 +116,7 @@ async fn initial_estimate_for_deep_recursion(with_reads: bool) { (75, 1.2), (100, 1.4), (125, 1.7), - (150, 2.1), + (150, 2.2), ] }; for &(recursion_depth, multiplier) in depths_and_multipliers { diff --git a/core/node/api_server/src/tx_sender/tests/mod.rs b/core/node/api_server/src/tx_sender/tests/mod.rs index cbe405b2aa63..d0fe3126c9ad 100644 --- a/core/node/api_server/src/tx_sender/tests/mod.rs +++ b/core/node/api_server/src/tx_sender/tests/mod.rs @@ -18,7 +18,8 @@ const LOAD_TEST_CASES: TestCases = test_casing: LoadnextContractExecutionParams::default(), // No storage modification LoadnextContractExecutionParams { - writes: 0, + initial_writes: 0, + repeated_writes: 0, events: 0, ..LoadnextContractExecutionParams::default() }, diff --git a/core/node/state_keeper/src/executor/tests/mod.rs b/core/node/state_keeper/src/executor/tests/mod.rs index 04fb016ab639..62e9cd1968f1 100644 --- a/core/node/state_keeper/src/executor/tests/mod.rs +++ b/core/node/state_keeper/src/executor/tests/mod.rs @@ -296,7 +296,7 @@ async fn deploy_and_call_loadtest(vm_mode: FastVmMode) { ); assert_executed( &executor - .execute_tx(alice.loadnext_custom_writes_call(tx.address, 1, 500_000_000)) + .execute_tx(alice.loadnext_custom_initial_writes_call(tx.address, 1, 500_000_000)) .await .unwrap(), ); @@ -344,7 +344,7 @@ async fn execute_reverted_tx(vm_mode: FastVmMode) { assert_reverted( &executor - .execute_tx(alice.loadnext_custom_writes_call( + .execute_tx(alice.loadnext_custom_initial_writes_call( tx.address, 1, 1_000_000, // We provide enough gas for tx to be executed, but not enough for the call to be successful. )) diff --git a/core/node/state_keeper/src/executor/tests/tester.rs b/core/node/state_keeper/src/executor/tests/tester.rs index 247703c866e4..49f456e82917 100644 --- a/core/node/state_keeper/src/executor/tests/tester.rs +++ b/core/node/state_keeper/src/executor/tests/tester.rs @@ -335,7 +335,7 @@ pub trait AccountLoadNextExecutable { /// Returns an `execute` transaction with custom factory deps (which aren't used in a transaction, /// so they are mostly useful to test bytecode compression). fn execute_with_factory_deps(&mut self, factory_deps: Vec>) -> Transaction; - fn loadnext_custom_writes_call( + fn loadnext_custom_initial_writes_call( &mut self, address: Address, writes: u32, @@ -407,17 +407,17 @@ impl AccountLoadNextExecutable for Account { /// Returns a transaction to the loadnext contract with custom amount of write requests. /// Increments the account nonce. - fn loadnext_custom_writes_call( + fn loadnext_custom_initial_writes_call( &mut self, address: Address, - writes: u32, + initial_writes: u32, gas_limit: u32, ) -> Transaction { // For each iteration of the expensive contract, there are two slots that are updated: // the length of the vector and the new slot with the element itself. 
let minimal_fee = 2 * testonly::DEFAULT_GAS_PER_PUBDATA - * writes + * initial_writes * INITIAL_STORAGE_WRITE_PUBDATA_BYTES as u32; let fee = testonly::fee(minimal_fee + gas_limit); @@ -427,7 +427,8 @@ impl AccountLoadNextExecutable for Account { contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: writes as usize, + initial_writes: initial_writes as usize, + repeated_writes: 100, events: 100, hashes: 100, recursive_calls: 0, diff --git a/core/tests/loadnext/Cargo.toml b/core/tests/loadnext/Cargo.toml index adb5c9eca429..9ceac7d5372a 100644 --- a/core/tests/loadnext/Cargo.toml +++ b/core/tests/loadnext/Cargo.toml @@ -9,6 +9,7 @@ license.workspace = true keywords.workspace = true categories.workspace = true publish = false +exclude = ["./dump"] [dependencies] zksync_types.workspace = true diff --git a/core/tests/loadnext/README.md b/core/tests/loadnext/README.md index 59288a7160ec..2556c1d9ca74 100644 --- a/core/tests/loadnext/README.md +++ b/core/tests/loadnext/README.md @@ -27,21 +27,22 @@ It: ## Transactions Parameters -The smart contract that is used for every l2 transaction can be found here: -`etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol`. +The smart contract that is used for every L2 transaction can be found here: +[`etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol`](../../../etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol). The `execute` function of the contract has the following parameters: -``` - function execute(uint reads, uint writes, uint hashes, uint events, uint max_recursion, uint deploys) external returns(uint) { +```solidity +function execute(uint reads, uint initialWrites, uint repeatedWrites, uint hashes, uint events, uint maxRecursion, uint deploys) external returns(uint) { ``` which correspond to the following configuration options: -``` +```rust pub struct LoadnextContractExecutionParams { pub reads: usize, - pub writes: usize, + pub initial_writes: usize, + pub repeated_writes: usize, pub events: usize, pub hashes: usize, pub recursive_calls: usize, @@ -51,8 +52,9 @@ pub struct LoadnextContractExecutionParams { For example, to simulate an average transaction on mainnet, one could do: -``` -CONTRACT_EXECUTION_PARAMS_WRITES=2 +```env +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=2 +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=2 CONTRACT_EXECUTION_PARAMS_READS=6 CONTRACT_EXECUTION_PARAMS_EVENTS=2 CONTRACT_EXECUTION_PARAMS_HASHES=10 @@ -62,8 +64,9 @@ CONTRACT_EXECUTION_PARAMS_DEPLOYS=0 Similarly, to simulate a lightweight transaction: -``` -CONTRACT_EXECUTION_PARAMS_WRITES=0 +```env +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=0 +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=0 CONTRACT_EXECUTION_PARAMS_READS=0 CONTRACT_EXECUTION_PARAMS_EVENTS=0 CONTRACT_EXECUTION_PARAMS_HASHES=0 @@ -86,10 +89,11 @@ Example invocation: - `MASTER_WALLET_PK` needs to be set to the private key of the master account. - `MAIN_TOKEN` needs to be set to the address of the token to be used for the loadtest. 
-``` +```sh cargo build -CONTRACT_EXECUTION_PARAMS_WRITES=2 \ +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=2 \ +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=2 \ CONTRACT_EXECUTION_PARAMS_READS=6 \ CONTRACT_EXECUTION_PARAMS_EVENTS=2 \ CONTRACT_EXECUTION_PARAMS_HASHES=10 \ diff --git a/core/tests/loadnext/src/account/tx_command_executor.rs b/core/tests/loadnext/src/account/tx_command_executor.rs index 2a916564fd61..4703d257cfd9 100644 --- a/core/tests/loadnext/src/account/tx_command_executor.rs +++ b/core/tests/loadnext/src/account/tx_command_executor.rs @@ -380,7 +380,8 @@ impl AccountLifespan { function .encode_input(&vec![ ethabi::Token::Uint(U256::from(self.contract_execution_params.reads)), - ethabi::Token::Uint(U256::from(self.contract_execution_params.writes)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.initial_writes)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.repeated_writes)), ethabi::Token::Uint(U256::from(self.contract_execution_params.hashes)), ethabi::Token::Uint(U256::from(self.contract_execution_params.events)), ethabi::Token::Uint(U256::from(self.contract_execution_params.recursive_calls)), diff --git a/core/tests/vm-benchmark/src/transaction.rs b/core/tests/vm-benchmark/src/transaction.rs index d5fedfa4df94..c625018fb9bf 100644 --- a/core/tests/vm-benchmark/src/transaction.rs +++ b/core/tests/vm-benchmark/src/transaction.rs @@ -12,7 +12,7 @@ use zksync_types::{ }; use zksync_utils::bytecode::hash_bytecode; -const LOAD_TEST_MAX_READS: usize = 100; +const LOAD_TEST_MAX_READS: usize = 3000; pub(crate) static PRIVATE_KEY: Lazy = Lazy::new(|| K256PrivateKey::from_bytes(H256([42; 32])).expect("invalid key bytes")); @@ -112,7 +112,7 @@ pub fn get_load_test_deploy_tx() -> Transaction { Some(CONTRACT_DEPLOYER_ADDRESS), create_calldata, Nonce(0), - tx_fee(100_000_000), + tx_fee(500_000_000), U256::zero(), L2ChainId::from(270), &PRIVATE_KEY, @@ -138,7 +138,8 @@ pub fn get_load_test_tx(nonce: u32, gas_limit: u32, params: LoadTestParams) -> T let calldata = execute_function .encode_input(&vec![ Token::Uint(U256::from(params.reads)), - Token::Uint(U256::from(params.writes)), + Token::Uint(U256::from(params.initial_writes)), + Token::Uint(U256::from(params.repeated_writes)), Token::Uint(U256::from(params.hashes)), Token::Uint(U256::from(params.events)), Token::Uint(U256::from(params.recursive_calls)), @@ -168,9 +169,10 @@ pub fn get_realistic_load_test_tx(nonce: u32) -> Transaction { nonce, 10_000_000, LoadTestParams { - reads: 30, - writes: 2, - events: 5, + reads: 243, + initial_writes: 1, + repeated_writes: 11, + events: 6, hashes: 10, recursive_calls: 0, deploys: 0, @@ -183,9 +185,10 @@ pub fn get_heavy_load_test_tx(nonce: u32) -> Transaction { nonce, 10_000_000, LoadTestParams { - reads: 100, - writes: 5, - events: 20, + reads: 296, + initial_writes: 13, + repeated_writes: 92, + events: 140, hashes: 100, recursive_calls: 20, deploys: 5, diff --git a/etc/contracts-test-data/contracts/loadnext/how_to_calculate_loadtest_profiles.md b/etc/contracts-test-data/contracts/loadnext/how_to_calculate_loadtest_profiles.md new file mode 100644 index 000000000000..5918c4f2308a --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/how_to_calculate_loadtest_profiles.md @@ -0,0 +1,15 @@ +# Calculating loadtest profiles + +Use the SQL scripts in this directory to calculate the characteristics of transactions within a miniblock range. + +Calculate `CONTRACT_EXECUTION_PARAMS` as follows: + +- `light`: all zeroes. +- `realistic`: median (50th percentile). 
+- `heavy`: generally use 2.5× the values in the 99th percentile. However, some operations are even less frequent than that (e.g. contract deployments). At the time of writing, contract deployments is set to 5. + +Metrics may be averaged across different block ranges to calculate a more holistic "characteristic." + +## Compensating for implicit activity + +The mere act of executing a transaction entails some ancillary activity on the network. For example, some events are emitted when tokens are transferred for gas payments. The loadtest contract does not compensate for this activity, so it should be kept in mind when evaluating loadtest activity. diff --git a/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol b/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol index b14286a45038..9186ff6180a2 100644 --- a/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol +++ b/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol @@ -8,25 +8,48 @@ contract LoadnextContract { uint[] readArray; uint[] writeArray; - constructor (uint reads) { + constructor(uint reads) { for (uint i = 0; i < reads; i++) { readArray.push(i); } } - function execute(uint reads, uint writes, uint hashes, uint events, uint max_recursion, uint deploys) external returns(uint) { - if (max_recursion > 0) { - return this.execute(reads, writes, hashes, events, max_recursion - 1, deploys); + function execute( + uint reads, + uint initialWrites, + uint repeatedWrites, + uint hashes, + uint events, + uint maxRecursion, + uint deploys + ) external returns (uint) { + if (maxRecursion > 0) { + return + this.execute( + reads, + initialWrites, + repeatedWrites, + hashes, + events, + maxRecursion - 1, + deploys + ); } + require(repeatedWrites <= readArray.length); uint sum = 0; // Somehow use result of storage read for compiler to not optimize this place. - for (uint i = 0; i < reads; i++) { + for (uint i = 0; i < repeatedWrites; i++) { + uint value = readArray[i]; + sum += value; + readArray[i] = value + 1; + } + for (uint i = repeatedWrites; i < reads; i++) { sum += readArray[i]; } - for (uint i = 0; i < writes; i++) { + for (uint i = 0; i < initialWrites; i++) { writeArray.push(i); } @@ -36,7 +59,9 @@ contract LoadnextContract { // Somehow use result of keccak for compiler to not optimize this place. 
for (uint i = 0; i < hashes; i++) { - sum += uint8(keccak256(abi.encodePacked("Message for encoding"))[0]); + sum += uint8( + keccak256(abi.encodePacked("Message for encoding"))[0] + ); } for (uint i = 0; i < deploys; i++) { @@ -47,7 +72,7 @@ contract LoadnextContract { function burnGas(uint256 gasToBurn) external { uint256 initialGas = gasleft(); - while(initialGas - gasleft() < gasToBurn) {} + while (initialGas - gasleft() < gasToBurn) {} } } diff --git a/etc/contracts-test-data/contracts/loadnext/query_event_metrics.sql b/etc/contracts-test-data/contracts/loadnext/query_event_metrics.sql new file mode 100644 index 000000000000..1a5d87d6fcfb --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/query_event_metrics.sql @@ -0,0 +1,19 @@ +-- calculate distribution of event emissions per transaction + +\set :start_from_miniblock_number 40000000 +\set :miniblock_range 10000 + +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select tx.hash, count(ev.*) as metric + from transactions tx + left join events ev on ev.tx_hash = tx.hash + where ev.miniblock_number >= :start_from_miniblock_number + and ev.miniblock_number < :start_from_miniblock_number + :miniblock_range + group by tx.hash) s; diff --git a/etc/contracts-test-data/contracts/loadnext/query_execution_info_metrics.sql b/etc/contracts-test-data/contracts/loadnext/query_execution_info_metrics.sql new file mode 100644 index 000000000000..bf9faba4b6dc --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/query_execution_info_metrics.sql @@ -0,0 +1,20 @@ +-- calculate distribution of execution_info fields per transaction + +-- execution_info fields: gas_used, vm_events, cycles_used, storage_logs, l2_to_l1_logs, contracts_used, pubdata_published, total_log_queries, contracts_deployed, l2_l1_long_messages, computational_gas_used, published_bytecode_bytes +\set exection_info_field 'storage_logs' +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select tx.miniblock_number, + (execution_info ->> :execution_info_field)::bigint as metric + from transactions tx) cd +where cd.miniblock_number >= :start_from_miniblock_number + and cd.miniblock_number < :start_from_miniblock_number + :miniblock_range; diff --git a/etc/contracts-test-data/contracts/loadnext/query_max_transactions_in_window.sql b/etc/contracts-test-data/contracts/loadnext/query_max_transactions_in_window.sql new file mode 100644 index 000000000000..91dd4bd47a7c --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/query_max_transactions_in_window.sql @@ -0,0 +1,23 @@ +-- not a metrics-collecting query, but may be useful to find an interesting range of transactions + +\set miniblock_number_range_start 36700000 +\set miniblock_number_range_end 36850000 +\set window_size 10000 +\set maximize_column l2_tx_count + +select miniblock_number_start, + miniblock_number_start + :window_size as miniblock_number_end, + metric_total +from (select 
mb.number as miniblock_number_start, + sum(mb.:maximize_column) + over lookahead + as metric_total + from miniblocks mb + where mb.number >= :miniblock_number_range_start + and mb.number < :miniblock_number_range_end + window lookahead as ( + order by mb.number + rows between current row and :window_size following + )) _s +order by metric_total desc +limit 10; diff --git a/etc/contracts-test-data/contracts/loadnext/query_read_metrics_basic.sql b/etc/contracts-test-data/contracts/loadnext/query_read_metrics_basic.sql new file mode 100644 index 000000000000..62195016f10e --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/query_read_metrics_basic.sql @@ -0,0 +1,39 @@ +-- calculate distribution of storage reads per transaction +-- does not calculate hot/cold reads + +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +with mb as (select * + from miniblocks mb + where mb.number >= :start_from_miniblock_number + order by mb.number + limit :miniblock_range) +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select miniblock_number, + (sum(read_write_logs) - sum(write_logs)) / sum(transaction_count) as metric, + sum(transaction_count) as transaction_count + from (select mb.number as miniblock_number, + (tx.execution_info ->> 'storage_logs')::bigint as read_write_logs, + null as write_logs, + 1 as transaction_count + from transactions tx, + mb + where tx.miniblock_number = mb.number + union + select mb.number as miniblock_number, + null as read_write_logs, + count(sl.*) as write_logs, + 0 as transaction_count + from storage_logs sl, + mb + where sl.miniblock_number = mb.number + group by mb.number) s + group by s.miniblock_number) t, generate_series(1, t.transaction_count); diff --git a/etc/contracts-test-data/contracts/loadnext/query_write_metrics.sql b/etc/contracts-test-data/contracts/loadnext/query_write_metrics.sql new file mode 100644 index 000000000000..f142347f9801 --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/query_write_metrics.sql @@ -0,0 +1,50 @@ +-- calculate distribution of initial and repeated writes per transaction + +\set start_from_miniblock_number 40000000; +\set miniblock_range 10000; + +select + -- initial writes + stddev_samp(initial_writes_per_tx) as initial_writes_stddev, + avg(initial_writes_per_tx) as initial_writes_avg, + min(initial_writes_per_tx) as initial_writes_min, + percentile_cont(0.01) within group (order by initial_writes_per_tx) as initial_writes_pct_01, + percentile_cont(0.50) within group (order by initial_writes_per_tx) as initial_writes_pct_50, + percentile_cont(0.99) within group (order by initial_writes_per_tx) as initial_writes_pct_99, + max(initial_writes_per_tx) as initial_writes_max, + + -- repeated writes + stddev_samp(repeated_writes_per_tx) as repeated_writes_stddev, + avg(repeated_writes_per_tx) as repeated_writes_avg, + min(repeated_writes_per_tx) as repeated_writes_min, + percentile_cont(0.01) within group (order by repeated_writes_per_tx) as repeated_writes_pct_01, + percentile_cont(0.50) within group (order by repeated_writes_per_tx) as repeated_writes_pct_50, + percentile_cont(0.99) within group (order by repeated_writes_per_tx) as repeated_writes_pct_99, + max(repeated_writes_per_tx) as repeated_writes_max +from 
(select initial_writes::real / l2_tx_count::real as initial_writes_per_tx, + (total_writes - initial_writes)::real / l2_tx_count::real as repeated_writes_per_tx + from (select mb.number as miniblock_number, + count(sl.hashed_key) as total_writes, + count(distinct sl.hashed_key) filter ( + where + iw.hashed_key is not null + ) as initial_writes, + mb.l2_tx_count as l2_tx_count + from miniblocks mb + join l1_batches l1b on l1b.number = mb.l1_batch_number + join storage_logs sl on sl.miniblock_number = mb.number + left join initial_writes iw on iw.hashed_key = sl.hashed_key + and iw.l1_batch_number = mb.l1_batch_number + and mb.number = ( + -- initial writes are only tracked by l1 batch number, so find the first miniblock in that batch that contains a write to that key + select miniblock_number + from storage_logs + where hashed_key = sl.hashed_key + order by miniblock_number + limit 1) + where mb.l2_tx_count <> 0 -- avoid div0 + and mb.number >= :start_from_miniblock_number + group by mb.number + order by mb.number desc + limit :miniblock_range) s, generate_series(1, s.l2_tx_count) -- scale by # of tx + ) t; From 8b31a85d47aae4c28868c76ce320ecedcd4bb3ad Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Mon, 11 Nov 2024 17:09:56 +0200 Subject: [PATCH 13/23] test(merkle-tree): Fix RocksDB termination in test (#3249) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fixes RocksDB termination in a unit test. ## Why ❔ Without such a termination, the test sporadically fails with the "terminate called without an active exception" message. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- core/node/metadata_calculator/src/recovery/tests.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/core/node/metadata_calculator/src/recovery/tests.rs b/core/node/metadata_calculator/src/recovery/tests.rs index 1d83c2f06031..4b2ba578a5b6 100644 --- a/core/node/metadata_calculator/src/recovery/tests.rs +++ b/core/node/metadata_calculator/src/recovery/tests.rs @@ -15,6 +15,7 @@ use zksync_health_check::{CheckHealth, HealthStatus, ReactiveHealthCheck}; use zksync_merkle_tree::{domain::ZkSyncTree, recovery::PersistenceThreadHandle, TreeInstruction}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::prepare_recovery_snapshot; +use zksync_storage::RocksDB; use zksync_types::{L1BatchNumber, U256}; use super::*; @@ -543,4 +544,9 @@ async fn pruning_during_recovery_is_detected() { .unwrap_err(); let err = format!("{err:#}").to_lowercase(); assert!(err.contains("continuing recovery is impossible"), "{err}"); + + // Because of an abrupt error, terminating a RocksDB instance needs to be handled explicitly. + tokio::task::spawn_blocking(RocksDB::await_rocksdb_termination) + .await + .unwrap(); } From 5cea9ec6298cbe57bcd09553fd7f52e9c2566d44 Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Mon, 11 Nov 2024 22:32:39 +0100 Subject: [PATCH 14/23] chore: Add README.md with basic information about Prover Autoscaler (#3241) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ ## Why ❔ ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. 
- [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. ref ZKD-1855 --------- Co-authored-by: EmilLuta --- prover/crates/bin/prover_autoscaler/README.md | 234 ++++++++++++++++++ 1 file changed, 234 insertions(+) create mode 100644 prover/crates/bin/prover_autoscaler/README.md diff --git a/prover/crates/bin/prover_autoscaler/README.md b/prover/crates/bin/prover_autoscaler/README.md new file mode 100644 index 000000000000..6a0ff84f88f2 --- /dev/null +++ b/prover/crates/bin/prover_autoscaler/README.md @@ -0,0 +1,234 @@ +# Prover Autoscaler + +Prover Autoscaler is needed to automatically scale Prover related Kubernetes Deployments according to the load in a +cluster with higher chances to get Nodes to run. If the cluster runs out of resources it moves the load to next one. + +## Design + +Prover Autoscaler has the main Scaler part and Agents running in each cluster. + +### Agent + +Agents watch via Kubernetes API status of Deployments, Pods and out of resources Events; perform scaling by requests +from Scaler. They watch only specified in config namespaces. Agent provides listens on 2 ports: `prometheus_port` to +export metrics (path is `/metrics`), and `http_port` with 3 paths: `/healthz`, `/cluster` to get the cluster status and +`/scale` to scale Deployments up or down. + +### Scaler + +Scaler collects cluster statuses from Agents, job queues from prover-job-monitor, calculates needed number of replicas +and sends scale requests to Agents. + +Requests flow diagram: + +```mermaid +sequenceDiagram + participant prover-job-monitor + participant Scaler + box cluster1 + participant Agent1 + participant K8s API1 + end + box cluster2 + participant Agent2 + participant K8s API2 + end + loop Watch + Agent1->>K8s API1: Watch namespaces + end + loop Watch + Agent2->>K8s API2: Watch namespaces + end + loop Recalculate + Scaler->>prover-job-monitor: /report + Scaler->>Agent1: /cluster + Scaler->>Agent2: /cluster + Scaler->>Agent1: /scale + end +``` + +Scaler supports 2 types of scaling algorithms: GPU and Simple. GPU usually is prover itself and all other Deployments +are using Simple algorithm. + +Simple algorithm tries to scale the Deployment up to `queue / speed` replicas (rounded up) in the best cluster. If there +is not enough capacity it continue in the next best cluster and so on. On each run it selects "best cluster" using +priority, number of capacity issues and cluster size. The capacity is limited by config (`max_provers` or +`max_replicas`) and also by availability of machines in the cluster. Autoscaler detects that a cluster is running out of +particular machines by watching for `FailedScaleUp` events and also by checking if a Pod stuck in Pending for longer +than `long_pending_duration`. If not enough capacity is detected not running Pods will be moved. + +GPU algorithm works similar to Simple one, but it also recognise different GPU types and distribute load across L4 GPUs +first, then T4, V100, P100 and A100, if available. + +Different namespaces are running different protocol versions and completely independent. Normally only one namespace is +active, and only during protocol upgrade both are active. Each namespace has to have correct version of binaries +installed, see `protocol_versions` config option. 
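The replica sizing rule above (`queue / speed` replicas, rounded up, spilling over to the next best cluster) can be illustrated with a minimal sketch. This is only an illustration with an assumed helper name (`distribute_replicas`) and hard-coded caps; the real Simple and GPU algorithms additionally account for GPU types, capacity issues, and pending-Pod relocation.

```rust
/// Illustrative only: distribute `ceil(queue / speed)` replicas across clusters
/// ordered by priority, capping each cluster at its configured maximum.
fn distribute_replicas(queue: u64, speed: u64, clusters: &[(&str, u64)]) -> Vec<(String, u64)> {
    let mut wanted = queue.div_ceil(speed); // target replica count
    let mut plan = Vec::new();
    for (cluster, max_replicas) in clusters {
        if wanted == 0 {
            break;
        }
        let here = wanted.min(*max_replicas); // take what fits in this cluster...
        plan.push((cluster.to_string(), here));
        wanted -= here; // ...and spill the remainder over to the next one
    }
    plan
}

fn main() {
    // A queue of 10_500 jobs with speed 500 needs 21 replicas:
    // cluster1 (cap 10) gets 10, the remaining 11 go to cluster2 (cap 20).
    let plan = distribute_replicas(10_500, 500, &[("cluster1", 10), ("cluster2", 20)]);
    println!("{plan:?}");
}
```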
+ +## Dependencies + +- [prover-job-monitor](.../prover_job_monitor/) +- Kubernetes API +- GCP API (optional) + +## Permissions + +Agents need the following Kubernetes permissions: + +```yaml +- apiGroups: + - '' + resources: + - pods + - events + - namespaces + - nodes + verbs: + - get + - watch + - list +- apiGroups: + - apps + resources: + - deployments + - replicasets + verbs: + - get + - list + - watch + - patch + - update +``` + +## Configuration + +Prover Autoscaler requires a config file provided via `--config-path` flag, supported format: YAML. Also you need to +specify which job to run Scaler or Agent using `--job=scaler` or `--job=agent` flag correspondingly. + +### Common configuration + +- `graceful_shutdown_timeout` is time to wait for all the task to finish before force shutdown. Default: 5s. +- `observability` section configures type of `log_format` (`plain` or `json`) and log levels per module with + `log_directives`. + +Example: + +```yaml +graceful_shutdown_timeout: 5s +observability: + log_format: plain + log_directives: 'zksync_prover_autoscaler=debug' +``` + +### Agent configuration + +`agent_config` section configures Agent parameters: + +- `prometheus_port` is a port for Prometheus metrics to be served on (path is `/metrics`). +- `http_port` is the main port for Scaler to connect to. +- `namespaces` is list of namespaces to watch. +- `dry_run` if enabled, Agent will not change number of replicas, just report success. Default: true. + +Example: + +```yaml +agent_config: + prometheus_port: 8080 + http_port: 8081 + namespaces: + - prover-old + - prover-new + dry_run: true +``` + +### Scaler configuration + +`scaler_config` section configures Scaler parameters: + +- `dry_run` if enabled, Scaler will not send any scaler requests. Default: false. +- `prometheus_port` is a port for Prometheus metrics to be served on (path is `/metrics`). +- `prover_job_monitor_url` is full URL to get queue report from prover-job-monitor. +- `agents` is Agent list to send requests to. +- `scaler_run_interval` is interval between re-calculations. Default: 10s. +- `protocol_versions` is a map namespaces to protocol version it processes. Should correspond binary versions running + there! +- `cluster_priorities` is a map cluster name to priority, the lower will be used first. +- `min_provers` is a map namespace to minimum number of provers to run even if the queue is empty. +- `max_provers` is a map of cluster name to map GPU type to maximum number of provers. +- `prover_speed` is a map GPU to speed divider. Default: 500. +- `long_pending_duration` is time after a pending pod considered long pending and will be relocated to different + cluster. Default: 10m. +- `scaler_targets` subsection is a list of Simple targets: + - `queue_report_field` is name of corresponding queue report section. See example for possible options. + - `deployment` is name of a Deployment to scale. + - `max_replicas` is a map of cluster name to maximum number of replicas. + - `speed` is a divider for corresponding queue. 
+ +Example: + +```yaml +scaler_config: + dry_run: true + prometheus_port: 8082 + prover_job_monitor_url: http://prover-job-monitor.default.svc.cluster.local:3074/queue_report + agents: + - http://prover-autoscaler-agent.cluster1.com + - http://prover-autoscaler-agent.cluster2.com + - http://prover-autoscaler-agent.cluster3.com + scaler_run_interval: 30s + protocol_versions: + prover-old: 0.24.2 + prover-new: 0.25.0 + cluster_priorities: + cluster1: 0 + cluster2: 100 + cluster3: 200 + min_provers: + prover-new: 0 + max_provers: + cluster1: + L4: 1 + T4: 200 + cluster2: + L4: 100 + T4: 200 + cluster3: + L4: 100 + T4: 100 + prover_speed: + L4: 500 + T4: 400 + long_pending_duration: 10m + scaler_targets: + - queue_report_field: basic_witness_jobs + deployment: witness-generator-basic-fri + max_replicas: + cluster1: 10 + cluster2: 20 + speed: 10 + - queue_report_field: leaf_witness_jobs + deployment: witness-generator-leaf-fri + max_replicas: + cluster1: 10 + speed: 10 + - queue_report_field: node_witness_jobs + deployment: witness-generator-node-fri + max_replicas: + cluster1: 10 + speed: 10 + - queue_report_field: recursion_tip_witness_jobs + deployment: witness-generator-recursion-tip-fri + max_replicas: + cluster1: 10 + speed: 10 + - queue_report_field: scheduler_witness_jobs + deployment: witness-generator-scheduler-fri + max_replicas: + cluster1: 10 + speed: 10 + - queue_report_field: proof_compressor_jobs + deployment: proof-fri-gpu-compressor + max_replicas: + cluster1: 10 + cluster2: 10 + speed: 5 +``` From 6e3c36e6426621bee82399db7814ca6756b613cb Mon Sep 17 00:00:00 2001 From: Dima Zhornyk <55756184+dimazhornyk@users.noreply.github.com> Date: Tue, 12 Nov 2024 10:46:31 +0700 Subject: [PATCH 15/23] fix: use_dummy_inclusion_data condition (#3244) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Fix invalid `if` usage for `use_dummy_inclusion_data` flag. ## Why ❔ ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- core/node/da_dispatcher/src/da_dispatcher.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/core/node/da_dispatcher/src/da_dispatcher.rs b/core/node/da_dispatcher/src/da_dispatcher.rs index f8e6f6b31723..2cdde9951be9 100644 --- a/core/node/da_dispatcher/src/da_dispatcher.rs +++ b/core/node/da_dispatcher/src/da_dispatcher.rs @@ -137,6 +137,8 @@ impl DataAvailabilityDispatcher { }; let inclusion_data = if self.config.use_dummy_inclusion_data() { + Some(InclusionData { data: vec![] }) + } else { self.client .get_inclusion_data(blob_info.blob_id.as_str()) .await @@ -146,10 +148,6 @@ impl DataAvailabilityDispatcher { blob_info.blob_id, blob_info.l1_batch_number ) })? 
- } else { - // if the inclusion verification is disabled, we don't need to wait for the inclusion - // data before committing the batch, so simply return an empty vector - Some(InclusionData { data: vec![] }) }; let Some(inclusion_data) = inclusion_data else { From 8be0c65f398b4c0735bffe1e363d856a78fb61d6 Mon Sep 17 00:00:00 2001 From: D025 Date: Tue, 12 Nov 2024 10:29:08 +0200 Subject: [PATCH 16/23] ci: migrate docker from tag workflow to new templates (#3250) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Update workflows ## Why ❔ For optimize CI/CD time ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- .github/workflows/build-docker-from-tag.yml | 12 ++++++++---- .github/workflows/ci.yml | 1 + .github/workflows/new-build-prover-template.yml | 1 + .../new-build-witness-generator-template.yml | 6 ++++++ 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index 206e15bd195f..e2788064efef 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -49,7 +49,7 @@ jobs: build-push-core-images: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/build-core-template.yml + uses: ./.github/workflows/new-build-core-template.yml if: contains(github.ref_name, 'core') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -57,6 +57,7 @@ jobs: with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} en_alpha_release: true + action: "push" build-push-tee-prover-images: name: Build and push images @@ -73,23 +74,25 @@ jobs: build-push-contract-verifier: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/build-contract-verifier-template.yml + uses: ./.github/workflows/new-build-contract-verifier-template.yml if: contains(github.ref_name, 'contract_verifier') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} + action: "push" build-push-prover-images: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/build-prover-template.yml + uses: ./.github/workflows/new-build-prover-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" + action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -97,13 +100,14 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/build-witness-generator-template.yml + uses: ./.github/workflows/new-build-witness-generator-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" + action: push secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2f29fe98f0e6..6dc91c6d82d1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -178,6 +178,7 @@ jobs: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 action: "build" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" + ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml index 046711d679e8..3a44fa807152 100644 --- a/.github/workflows/new-build-prover-template.yml +++ b/.github/workflows/new-build-prover-template.yml @@ -158,6 +158,7 @@ jobs: SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com SCCACHE_GCS_RW_MODE=READ_WRITE + ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} RUSTC_WRAPPER=sccache file: docker/${{ matrix.components }}/Dockerfile tags: | diff --git a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml index 2f1fc0b2dd86..5644215f4352 100644 --- a/.github/workflows/new-build-witness-generator-template.yml +++ b/.github/workflows/new-build-witness-generator-template.yml @@ -9,6 +9,10 @@ on: description: "DOCKERHUB_TOKEN" required: true inputs: + ERA_BELLMAN_CUDA_RELEASE: + description: "ERA_BELLMAN_CUDA_RELEASE" + type: string + required: true image_tag_suffix: description: "Optional suffix to override tag name generation" type: string @@ -127,6 +131,8 @@ jobs: SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com SCCACHE_GCS_RW_MODE=READ_WRITE RUSTC_WRAPPER=sccache + ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} + RUST_FLAGS=${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} file: docker/${{ matrix.components }}/Dockerfile tags: | us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} From 363b4f09937496fadeb38857f5c0c73146995ce5 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Tue, 12 Nov 2024 11:20:45 +0200 Subject: [PATCH 17/23] fix(merkle-tree): Repair stale keys for tree in background (#3200) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Implements a background task to remove bogus stale keys for the Merkle tree. ## Why ❔ These keys could have been produced during L1 batch reverts before https://github.com/matter-labs/zksync-era/pull/3178. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
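Before the diff, here is a minimal usage sketch of the repair task this patch introduces. It relies only on the API visible in `core/lib/merkle_tree/src/repair.rs` below; the blocking `run()` call and its `anyhow::Result<()>` return type are assumptions based on the method's doc comment, and in the node itself the task is enabled via the new `merkle_tree_repair_stale_keys` config option rather than wired up by hand.

```rust
use std::time::Duration;

use zksync_merkle_tree::{repair::StaleKeysRepairTask, RocksDBWrapper};

fn spawn_stale_keys_repair(db: RocksDBWrapper) -> anyhow::Result<()> {
    // One-off check: list bogus stale keys for a single tree version.
    let bogus = StaleKeysRepairTask::bogus_stale_keys(&db, 0);
    println!("bogus stale keys at version 0: {}", bogus.len());

    // Background repair: the task runs until its handle is dropped.
    let (mut task, handle) = StaleKeysRepairTask::new(db);
    task.set_poll_interval(Duration::from_secs(60));
    let task_thread = std::thread::spawn(move || task.run()); // `run()` is assumed to block

    // ...later: inspect progress, then stop the task by dropping the handle.
    println!("repair stats: {:?}", handle.stats());
    drop(handle);
    task_thread.join().expect("repair task panicked")
}
```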
--- core/bin/external_node/src/config/mod.rs | 9 + core/bin/external_node/src/node_builder.rs | 5 + core/lib/config/src/configs/experimental.rs | 4 + core/lib/config/src/testonly.rs | 1 + core/lib/env_config/src/database.rs | 4 + core/lib/merkle_tree/src/domain.rs | 5 + core/lib/merkle_tree/src/lib.rs | 16 + core/lib/merkle_tree/src/pruning.rs | 39 +- core/lib/merkle_tree/src/repair.rs | 376 ++++++++++++++++++ core/lib/merkle_tree/src/storage/rocksdb.rs | 98 ++++- .../merkle_tree/src/storage/serialization.rs | 13 + core/lib/merkle_tree/src/utils.rs | 43 ++ core/lib/protobuf_config/src/experimental.rs | 8 +- .../src/proto/config/experimental.proto | 3 +- .../src/api_server/metrics.rs | 1 + .../metadata_calculator/src/api_server/mod.rs | 15 + .../src/api_server/tests.rs | 15 + core/node/metadata_calculator/src/helpers.rs | 14 + core/node/metadata_calculator/src/lib.rs | 7 + core/node/metadata_calculator/src/repair.rs | 258 ++++++++++++ .../layers/metadata_calculator.rs | 30 +- 21 files changed, 923 insertions(+), 41 deletions(-) create mode 100644 core/lib/merkle_tree/src/repair.rs create mode 100644 core/node/metadata_calculator/src/repair.rs diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index 0a94f993656a..81604f83008a 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -408,6 +408,9 @@ pub(crate) struct OptionalENConfig { /// Timeout to wait for the Merkle tree database to run compaction on stalled writes. #[serde(default = "OptionalENConfig::default_merkle_tree_stalled_writes_timeout_sec")] merkle_tree_stalled_writes_timeout_sec: u64, + /// Enables the stale keys repair task for the Merkle tree. + #[serde(default)] + pub merkle_tree_repair_stale_keys: bool, // Postgres config (new parameters) /// Threshold in milliseconds for the DB connection lifetime to denote it as long-living and log its details. @@ -639,6 +642,12 @@ impl OptionalENConfig { merkle_tree.stalled_writes_timeout_sec, default_merkle_tree_stalled_writes_timeout_sec ), + merkle_tree_repair_stale_keys: general_config + .db_config + .as_ref() + .map_or(false, |config| { + config.experimental.merkle_tree_repair_stale_keys + }), database_long_connection_threshold_ms: load_config!( general_config.postgres_config, long_connection_threshold_ms diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index b7f6f8039025..5c70fd436781 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -378,6 +378,11 @@ impl ExternalNodeBuilder { layer = layer.with_tree_api_config(merkle_tree_api_config); } + // Add stale keys repair task if requested. + if self.config.optional.merkle_tree_repair_stale_keys { + layer = layer.with_stale_keys_repair(); + } + // Add tree pruning if needed. if self.config.optional.pruning_enabled { layer = layer.with_pruning_config(self.config.optional.pruning_removal_delay()); diff --git a/core/lib/config/src/configs/experimental.rs b/core/lib/config/src/configs/experimental.rs index a87a221ef222..2553864e251d 100644 --- a/core/lib/config/src/configs/experimental.rs +++ b/core/lib/config/src/configs/experimental.rs @@ -29,6 +29,9 @@ pub struct ExperimentalDBConfig { /// correspondingly; otherwise, RocksDB performance can significantly degrade. #[serde(default)] pub include_indices_and_filters_in_block_cache: bool, + /// Enables the stale keys repair task for the Merkle tree. 
+ #[serde(default)] + pub merkle_tree_repair_stale_keys: bool, } impl Default for ExperimentalDBConfig { @@ -40,6 +43,7 @@ impl Default for ExperimentalDBConfig { protective_reads_persistence_enabled: false, processing_delay_ms: Self::default_merkle_tree_processing_delay_ms(), include_indices_and_filters_in_block_cache: false, + merkle_tree_repair_stale_keys: false, } } } diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 93d502cc4e8a..c24d47f27b33 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -305,6 +305,7 @@ impl Distribution for EncodeDist { protective_reads_persistence_enabled: self.sample(rng), processing_delay_ms: self.sample(rng), include_indices_and_filters_in_block_cache: self.sample(rng), + merkle_tree_repair_stale_keys: self.sample(rng), } } } diff --git a/core/lib/env_config/src/database.rs b/core/lib/env_config/src/database.rs index 119d64b7738c..ae4c3059ce32 100644 --- a/core/lib/env_config/src/database.rs +++ b/core/lib/env_config/src/database.rs @@ -88,6 +88,7 @@ mod tests { DATABASE_MERKLE_TREE_MAX_L1_BATCHES_PER_ITER=50 DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_BLOCK_CACHE_CAPACITY_MB=64 DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_MAX_OPEN_FILES=100 + DATABASE_EXPERIMENTAL_MERKLE_TREE_REPAIR_STALE_KEYS=true "#; lock.set_env(config); @@ -109,6 +110,7 @@ mod tests { db_config.experimental.state_keeper_db_max_open_files, NonZeroU32::new(100) ); + assert!(db_config.experimental.merkle_tree_repair_stale_keys); } #[test] @@ -118,6 +120,7 @@ mod tests { "DATABASE_STATE_KEEPER_DB_PATH", "DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_MAX_OPEN_FILES", "DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_BLOCK_CACHE_CAPACITY_MB", + "DATABASE_EXPERIMENTAL_MERKLE_TREE_REPAIR_STALE_KEYS", "DATABASE_MERKLE_TREE_BACKUP_PATH", "DATABASE_MERKLE_TREE_PATH", "DATABASE_MERKLE_TREE_MODE", @@ -144,6 +147,7 @@ mod tests { 128 ); assert_eq!(db_config.experimental.state_keeper_db_max_open_files, None); + assert!(!db_config.experimental.merkle_tree_repair_stale_keys); // Check that new env variable for Merkle tree path is supported lock.set_env("DATABASE_MERKLE_TREE_PATH=/db/tree/main"); diff --git a/core/lib/merkle_tree/src/domain.rs b/core/lib/merkle_tree/src/domain.rs index 5064c791ed5b..5265f93264f2 100644 --- a/core/lib/merkle_tree/src/domain.rs +++ b/core/lib/merkle_tree/src/domain.rs @@ -410,6 +410,11 @@ impl ZkSyncTreeReader { &self.0.db } + /// Converts this reader to the underlying DB. + pub fn into_db(self) -> RocksDBWrapper { + self.0.db + } + /// Returns the root hash and leaf count at the specified L1 batch. pub fn root_info(&self, l1_batch_number: L1BatchNumber) -> Option<(ValueHash, u64)> { let root = self.0.root(l1_batch_number.0.into())?; diff --git a/core/lib/merkle_tree/src/lib.rs b/core/lib/merkle_tree/src/lib.rs index 5e97d6d77c69..1782f373954c 100644 --- a/core/lib/merkle_tree/src/lib.rs +++ b/core/lib/merkle_tree/src/lib.rs @@ -71,6 +71,7 @@ mod hasher; mod metrics; mod pruning; pub mod recovery; +pub mod repair; mod storage; mod types; mod utils; @@ -200,6 +201,21 @@ impl MerkleTree { root.unwrap_or(Root::Empty) } + /// Incorrect version of [`Self::truncate_recent_versions()`] that doesn't remove stale keys for the truncated tree versions. 
+ #[cfg(test)] + fn truncate_recent_versions_incorrectly( + &mut self, + retained_version_count: u64, + ) -> anyhow::Result<()> { + let mut manifest = self.db.manifest().unwrap_or_default(); + if manifest.version_count > retained_version_count { + manifest.version_count = retained_version_count; + let patch = PatchSet::from_manifest(manifest); + self.db.apply_patch(patch)?; + } + Ok(()) + } + /// Extends this tree by creating its new version. /// /// # Return value diff --git a/core/lib/merkle_tree/src/pruning.rs b/core/lib/merkle_tree/src/pruning.rs index 2e328d0a2bb5..ae8300b893ab 100644 --- a/core/lib/merkle_tree/src/pruning.rs +++ b/core/lib/merkle_tree/src/pruning.rs @@ -250,6 +250,7 @@ mod tests { use super::*; use crate::{ types::{Node, NodeKey}, + utils::testonly::setup_tree_with_stale_keys, Database, Key, MerkleTree, PatchSet, RocksDBWrapper, TreeEntry, ValueHash, }; @@ -507,47 +508,17 @@ mod tests { test_keys_are_removed_by_pruning_when_overwritten_in_multiple_batches(true); } - fn test_pruning_with_truncation(db: impl PruneDatabase) { - let mut tree = MerkleTree::new(db).unwrap(); - let kvs: Vec<_> = (0_u64..100) - .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) - .collect(); - tree.extend(kvs).unwrap(); - - let overridden_kvs = vec![TreeEntry::new( - Key::from(0), - 1, - ValueHash::repeat_byte(0xaa), - )]; - tree.extend(overridden_kvs).unwrap(); - - let stale_keys = tree.db.stale_keys(1); - assert!( - stale_keys.iter().any(|key| !key.is_empty()), - "{stale_keys:?}" - ); - - // Revert `overridden_kvs`. - tree.truncate_recent_versions(1).unwrap(); - assert_eq!(tree.latest_version(), Some(0)); - let future_stale_keys = tree.db.stale_keys(1); - assert!(future_stale_keys.is_empty()); - - // Add a new version without the key. To make the matter more egregious, the inserted key - // differs from all existing keys, starting from the first nibble. - let new_key = Key::from_big_endian(&[0xaa; 32]); - let new_kvs = vec![TreeEntry::new(new_key, 101, ValueHash::repeat_byte(0xaa))]; - tree.extend(new_kvs).unwrap(); - assert_eq!(tree.latest_version(), Some(1)); + fn test_pruning_with_truncation(mut db: impl PruneDatabase) { + setup_tree_with_stale_keys(&mut db, false); - let stale_keys = tree.db.stale_keys(1); + let stale_keys = db.stale_keys(1); assert_eq!(stale_keys.len(), 1); assert!( stale_keys[0].is_empty() && stale_keys[0].version == 0, "{stale_keys:?}" ); - let (mut pruner, _) = MerkleTreePruner::new(tree.db); + let (mut pruner, _) = MerkleTreePruner::new(db); let prunable_version = pruner.last_prunable_version().unwrap(); assert_eq!(prunable_version, 1); let stats = pruner diff --git a/core/lib/merkle_tree/src/repair.rs b/core/lib/merkle_tree/src/repair.rs new file mode 100644 index 000000000000..c83569e96b13 --- /dev/null +++ b/core/lib/merkle_tree/src/repair.rs @@ -0,0 +1,376 @@ +//! Service tasks for the Merkle tree. + +use std::{ + ops, + sync::{mpsc, Arc, Mutex}, + time::{Duration, Instant}, +}; + +use anyhow::Context as _; +use rayon::prelude::*; + +use crate::{ + types::{NodeKey, StaleNodeKey}, + Database, PruneDatabase, RocksDBWrapper, +}; + +/// Persisted information about stale keys repair progress. +#[derive(Debug)] +pub(crate) struct StaleKeysRepairData { + pub next_version: u64, +} + +/// [`StaleKeysRepairTask`] progress stats. +#[derive(Debug, Clone, Default)] +pub struct StaleKeysRepairStats { + /// Versions checked by the task, or `None` if no versions have been checked. + pub checked_versions: Option>, + /// Number of repaired stale keys. 
+ pub repaired_key_count: usize, +} + +#[derive(Debug)] +struct StepStats { + checked_versions: ops::RangeInclusive, + repaired_key_count: usize, +} + +/// Handle for a [`StaleKeysRepairTask`] allowing to abort its operation. +/// +/// The task is aborted once the handle is dropped. +#[must_use = "Paired `StaleKeysRepairTask` is aborted once handle is dropped"] +#[derive(Debug)] +pub struct StaleKeysRepairHandle { + stats: Arc>, + _aborted_sender: mpsc::Sender<()>, +} + +impl StaleKeysRepairHandle { + /// Returns stats for the paired task. + #[allow(clippy::missing_panics_doc)] // mutex poisoning shouldn't happen + pub fn stats(&self) -> StaleKeysRepairStats { + self.stats.lock().expect("stats mutex poisoned").clone() + } +} + +/// Task that repairs stale keys for the tree. +/// +/// Early tree versions contained a bug: If a tree version was truncated, stale keys for it remained intact. +/// If an overwritten tree version did not contain the same keys, this could lead to keys incorrectly marked as stale, +/// meaning that after pruning, a tree may end up broken. +#[derive(Debug)] +pub struct StaleKeysRepairTask { + db: RocksDBWrapper, + parallelism: u64, + poll_interval: Duration, + stats: Arc>, + aborted_receiver: mpsc::Receiver<()>, +} + +impl StaleKeysRepairTask { + /// Creates a new task. + pub fn new(db: RocksDBWrapper) -> (Self, StaleKeysRepairHandle) { + let (aborted_sender, aborted_receiver) = mpsc::channel(); + let stats = Arc::>::default(); + let this = Self { + db, + parallelism: (rayon::current_num_threads() as u64).max(1), + poll_interval: Duration::from_secs(60), + stats: stats.clone(), + aborted_receiver, + }; + let handle = StaleKeysRepairHandle { + stats, + _aborted_sender: aborted_sender, + }; + (this, handle) + } + + /// Sets the poll interval for this task. + pub fn set_poll_interval(&mut self, poll_interval: Duration) { + self.poll_interval = poll_interval; + } + + /// Runs stale key detection for a single tree version. + #[tracing::instrument(skip(db))] + pub fn bogus_stale_keys(db: &RocksDBWrapper, version: u64) -> Vec { + const SAMPLE_COUNT: usize = 5; + + let version_keys = db.all_keys_for_version(version).unwrap_or_else(|err| { + panic!("failed loading keys changed in tree version {version}: {err}") + }); + let stale_keys = db.stale_keys(version); + + if !version_keys.unreachable_keys.is_empty() { + let keys_sample: Vec<_> = version_keys + .unreachable_keys + .iter() + .take(SAMPLE_COUNT) + .collect::>(); + tracing::warn!( + version, + unreachable_keys.len = version_keys.unreachable_keys.len(), + unreachable_keys.sample = ?keys_sample, + "Found unreachable keys in tree" + ); + } + + let mut bogus_stale_keys = vec![]; + for stale_key in stale_keys { + if version_keys.valid_keys.contains(&stale_key.nibbles) { + // Normal case: a new node obsoletes a previous version. + } else if version_keys.unreachable_keys.contains(&stale_key.nibbles) { + // Explainable bogus stale key: a node that was updated in `version` before the truncation is no longer updated after truncation. 
+ bogus_stale_keys.push(stale_key); + } else { + tracing::warn!( + version, + ?stale_key, + "Unexplained bogus stale key: not present in any nodes changed in the tree version" + ); + bogus_stale_keys.push(stale_key); + } + } + + if bogus_stale_keys.is_empty() { + return vec![]; + } + + let keys_sample: Vec<_> = bogus_stale_keys.iter().take(SAMPLE_COUNT).collect(); + tracing::info!( + stale_keys.len = bogus_stale_keys.len(), + stale_keys.sample = ?keys_sample, + "Found bogus stale keys" + ); + bogus_stale_keys + } + + /// Returns a boolean flag indicating whether the task data was updated. + fn step(&mut self) -> anyhow::Result> { + let repair_data = self + .db + .stale_keys_repair_data() + .context("failed getting repair data")?; + let min_stale_key_version = self.db.min_stale_key_version(); + let start_version = match (repair_data, min_stale_key_version) { + (_, None) => { + tracing::debug!("No stale keys in tree, nothing to do"); + return Ok(None); + } + (None, Some(version)) => version, + (Some(data), Some(version)) => data.next_version.max(version), + }; + + let latest_version = self + .db + .manifest() + .and_then(|manifest| manifest.version_count.checked_sub(1)); + let Some(latest_version) = latest_version else { + tracing::warn!( + min_stale_key_version, + "Tree has stale keys, but no latest versions" + ); + return Ok(None); + }; + + let end_version = (start_version + self.parallelism - 1).min(latest_version); + let versions = start_version..=end_version; + if versions.is_empty() { + tracing::debug!(?versions, latest_version, "No tree versions to check"); + return Ok(None); + } + + tracing::debug!( + ?versions, + latest_version, + ?min_stale_key_version, + "Checking stale keys" + ); + + let stale_keys = versions + .clone() + .into_par_iter() + .map(|version| { + Self::bogus_stale_keys(&self.db, version) + .into_iter() + .map(|key| StaleNodeKey::new(key, version)) + .collect::>() + }) + .reduce(Vec::new, |mut acc, keys| { + acc.extend(keys); + acc + }); + self.update_task_data(versions.clone(), &stale_keys)?; + + Ok(Some(StepStats { + checked_versions: versions, + repaired_key_count: stale_keys.len(), + })) + } + + #[tracing::instrument( + level = "debug", + err, + skip(self, removed_keys), + fields(removed_keys.len = removed_keys.len()), + )] + fn update_task_data( + &mut self, + versions: ops::RangeInclusive, + removed_keys: &[StaleNodeKey], + ) -> anyhow::Result<()> { + tracing::debug!("Updating task data"); + let started_at = Instant::now(); + let new_data = StaleKeysRepairData { + next_version: *versions.end() + 1, + }; + self.db + .repair_stale_keys(&new_data, removed_keys) + .context("failed removing bogus stale keys")?; + let latency = started_at.elapsed(); + tracing::debug!(?latency, "Updated task data"); + Ok(()) + } + + fn wait_for_abort(&mut self, timeout: Duration) -> bool { + match self.aborted_receiver.recv_timeout(timeout) { + Ok(()) | Err(mpsc::RecvTimeoutError::Disconnected) => true, + Err(mpsc::RecvTimeoutError::Timeout) => false, + } + } + + fn update_stats(&self, step_stats: StepStats) { + let mut stats = self.stats.lock().expect("stats mutex poisoned"); + if let Some(versions) = &mut stats.checked_versions { + *versions = *versions.start()..=*step_stats.checked_versions.end(); + } else { + stats.checked_versions = Some(step_stats.checked_versions); + } + stats.repaired_key_count += step_stats.repaired_key_count; + } + + /// Runs this task indefinitely. + /// + /// # Errors + /// + /// Propagates RocksDB I/O errors. 
+ pub fn run(mut self) -> anyhow::Result<()> { + let repair_data = self + .db + .stale_keys_repair_data() + .context("failed getting repair data")?; + tracing::info!( + paralellism = self.parallelism, + poll_interval = ?self.poll_interval, + ?repair_data, + "Starting repair task" + ); + + let mut wait_interval = Duration::ZERO; + while !self.wait_for_abort(wait_interval) { + wait_interval = if let Some(step_stats) = self.step()? { + self.update_stats(step_stats); + Duration::ZERO + } else { + self.poll_interval + }; + } + tracing::info!("Stop signal received, stale keys repair is shut down"); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::thread; + + use super::*; + use crate::{ + utils::testonly::setup_tree_with_stale_keys, Key, MerkleTree, MerkleTreePruner, TreeEntry, + ValueHash, + }; + + #[test] + fn stale_keys_repair_with_normal_tree() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + + // The task should work fine with future tree versions. + for version in [0, 1, 100] { + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, version); + assert!(bogus_stale_keys.is_empty()); + } + + let kvs: Vec<_> = (0_u64..100) + .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) + .collect(); + MerkleTree::new(&mut db).unwrap().extend(kvs).unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 0); + assert!(bogus_stale_keys.is_empty()); + } + + #[test] + fn detecting_bogus_stale_keys() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + setup_tree_with_stale_keys(&mut db, true); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 1); + assert!(!bogus_stale_keys.is_empty()); + + let (mut task, _handle) = StaleKeysRepairTask::new(db); + task.parallelism = 10; // Ensure that all tree versions are checked at once. + // Repair the tree. + let step_stats = task.step().unwrap().expect("tree was not repaired"); + assert_eq!(step_stats.checked_versions, 1..=1); + assert!(step_stats.repaired_key_count > 0); + // Check that the tree works fine once it's pruned. + let (mut pruner, _) = MerkleTreePruner::new(&mut task.db); + pruner.prune_up_to(1).unwrap().expect("tree was not pruned"); + + MerkleTree::new(&mut task.db) + .unwrap() + .verify_consistency(1, false) + .unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&task.db, 1); + assert!(bogus_stale_keys.is_empty()); + MerkleTree::new(&mut task.db) + .unwrap() + .verify_consistency(1, false) + .unwrap(); + + assert!(task.step().unwrap().is_none()); + } + + #[test] + fn full_stale_keys_task_workflow() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + setup_tree_with_stale_keys(&mut db, true); + + let (task, handle) = StaleKeysRepairTask::new(db.clone()); + let task_thread = thread::spawn(|| task.run()); + + loop { + if let Some(task_data) = db.stale_keys_repair_data().unwrap() { + if task_data.next_version == 2 { + // All tree versions are processed. 
+ break; + } + } + thread::sleep(Duration::from_millis(50)); + } + let stats = handle.stats(); + assert_eq!(stats.checked_versions, Some(1..=1)); + assert!(stats.repaired_key_count > 0, "{stats:?}"); + + assert!(!task_thread.is_finished()); + drop(handle); + task_thread.join().unwrap().unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 1); + assert!(bogus_stale_keys.is_empty()); + } +} diff --git a/core/lib/merkle_tree/src/storage/rocksdb.rs b/core/lib/merkle_tree/src/storage/rocksdb.rs index 6995bbfbfc7f..5a40c82b680c 100644 --- a/core/lib/merkle_tree/src/storage/rocksdb.rs +++ b/core/lib/merkle_tree/src/storage/rocksdb.rs @@ -1,6 +1,13 @@ //! RocksDB implementation of [`Database`]. -use std::{any::Any, cell::RefCell, ops, path::Path, sync::Arc}; +use std::{ + any::Any, + cell::RefCell, + collections::{HashMap, HashSet}, + ops, + path::Path, + sync::Arc, +}; use anyhow::Context as _; use rayon::prelude::*; @@ -15,6 +22,7 @@ use zksync_storage::{ use crate::{ errors::{DeserializeError, ErrorContext}, metrics::ApplyPatchStats, + repair::StaleKeysRepairData, storage::{ database::{PruneDatabase, PrunePatchSet}, Database, NodeKeys, PatchSet, @@ -70,6 +78,15 @@ impl ToDbKey for (NodeKey, bool) { } } +/// All node keys modified in a certain version of the tree, loaded via a prefix iterator. +#[derive(Debug, Default)] +pub(crate) struct VersionKeys { + /// Valid / reachable keys modified in the version. + pub valid_keys: HashSet, + /// Unreachable keys modified in the version, e.g. as a result of truncating the tree and overwriting the version. + pub unreachable_keys: HashSet, +} + /// Main [`Database`] implementation wrapping a [`RocksDB`] reference. /// /// # Cloning @@ -97,6 +114,8 @@ impl RocksDBWrapper { // since the minimum node key is [0, 0, 0, 0, 0, 0, 0, 0]. const MANIFEST_KEY: &'static [u8] = &[0]; + const STALE_KEYS_REPAIR_KEY: &'static [u8] = &[0, 0]; + /// Creates a new wrapper, initializing RocksDB at the specified directory. /// /// # Errors @@ -174,6 +193,83 @@ impl RocksDBWrapper { }) } + pub(crate) fn all_keys_for_version( + &self, + version: u64, + ) -> Result { + let Some(Root::Filled { + node: root_node, .. + }) = self.root(version) + else { + return Ok(VersionKeys::default()); + }; + + let cf = MerkleTreeColumnFamily::Tree; + let version_prefix = version.to_be_bytes(); + let mut nodes = HashMap::from([(Nibbles::EMPTY, root_node)]); + let mut unreachable_keys = HashSet::new(); + + for (raw_key, raw_value) in self.db.prefix_iterator_cf(cf, &version_prefix) { + let key = NodeKey::from_db_key(&raw_key); + let Some((parent_nibbles, nibble)) = key.nibbles.split_last() else { + // Root node, already processed + continue; + }; + let Some(Node::Internal(parent)) = nodes.get(&parent_nibbles) else { + unreachable_keys.insert(key.nibbles); + continue; + }; + let Some(this_ref) = parent.child_ref(nibble) else { + unreachable_keys.insert(key.nibbles); + continue; + }; + if this_ref.version != version { + unreachable_keys.insert(key.nibbles); + continue; + } + + // Now we are sure that `this_ref` actually points to the node we're processing. 
+ let node = Self::deserialize_node(&raw_value, &key, this_ref.is_leaf)?; + nodes.insert(key.nibbles, node); + } + + Ok(VersionKeys { + valid_keys: nodes.into_keys().collect(), + unreachable_keys, + }) + } + + pub(crate) fn repair_stale_keys( + &mut self, + data: &StaleKeysRepairData, + removed_keys: &[StaleNodeKey], + ) -> anyhow::Result<()> { + let mut raw_value = vec![]; + data.serialize(&mut raw_value); + + let mut write_batch = self.db.new_write_batch(); + write_batch.put_cf( + MerkleTreeColumnFamily::Tree, + Self::STALE_KEYS_REPAIR_KEY, + &raw_value, + ); + for key in removed_keys { + write_batch.delete_cf(MerkleTreeColumnFamily::StaleKeys, &key.to_db_key()); + } + self.db + .write(write_batch) + .context("Failed writing a batch to RocksDB") + } + + pub(crate) fn stale_keys_repair_data( + &self, + ) -> Result, DeserializeError> { + let Some(raw_value) = self.raw_node(Self::STALE_KEYS_REPAIR_KEY) else { + return Ok(None); + }; + StaleKeysRepairData::deserialize(&raw_value).map(Some) + } + /// Returns the wrapped RocksDB instance. pub fn into_inner(self) -> RocksDB { self.db diff --git a/core/lib/merkle_tree/src/storage/serialization.rs b/core/lib/merkle_tree/src/storage/serialization.rs index d0c573fd8170..700a4cd5020b 100644 --- a/core/lib/merkle_tree/src/storage/serialization.rs +++ b/core/lib/merkle_tree/src/storage/serialization.rs @@ -4,6 +4,7 @@ use std::{collections::HashMap, str}; use crate::{ errors::{DeserializeError, DeserializeErrorKind, ErrorContext}, + repair::StaleKeysRepairData, types::{ ChildRef, InternalNode, Key, LeafNode, Manifest, Node, RawNode, Root, TreeTags, ValueHash, HASH_SIZE, KEY_SIZE, @@ -355,6 +356,18 @@ impl Manifest { } } +impl StaleKeysRepairData { + pub(super) fn deserialize(mut bytes: &[u8]) -> Result { + let next_version = + leb128::read::unsigned(&mut bytes).map_err(DeserializeErrorKind::Leb128)?; + Ok(Self { next_version }) + } + + pub(super) fn serialize(&self, buffer: &mut Vec) { + leb128::write::unsigned(buffer, self.next_version).unwrap(); + } +} + #[cfg(test)] mod tests { use zksync_types::H256; diff --git a/core/lib/merkle_tree/src/utils.rs b/core/lib/merkle_tree/src/utils.rs index 4771a940f2c8..a3c025a8b7bd 100644 --- a/core/lib/merkle_tree/src/utils.rs +++ b/core/lib/merkle_tree/src/utils.rs @@ -165,6 +165,49 @@ impl Iterator for MergingIter { impl ExactSizeIterator for MergingIter {} +#[cfg(test)] +pub(crate) mod testonly { + use crate::{Key, MerkleTree, PruneDatabase, TreeEntry, ValueHash}; + + pub(crate) fn setup_tree_with_stale_keys(db: impl PruneDatabase, incorrect_truncation: bool) { + let mut tree = MerkleTree::new(db).unwrap(); + let kvs: Vec<_> = (0_u64..100) + .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) + .collect(); + tree.extend(kvs).unwrap(); + + let overridden_kvs = vec![TreeEntry::new( + Key::from(0), + 1, + ValueHash::repeat_byte(0xaa), + )]; + tree.extend(overridden_kvs).unwrap(); + + let stale_keys = tree.db.stale_keys(1); + assert!( + stale_keys.iter().any(|key| !key.is_empty()), + "{stale_keys:?}" + ); + + // Revert `overridden_kvs`. + if incorrect_truncation { + tree.truncate_recent_versions_incorrectly(1).unwrap(); + } else { + tree.truncate_recent_versions(1).unwrap(); + } + assert_eq!(tree.latest_version(), Some(0)); + let future_stale_keys = tree.db.stale_keys(1); + assert_eq!(future_stale_keys.is_empty(), !incorrect_truncation); + + // Add a new version without the key. To make the matter more egregious, the inserted key + // differs from all existing keys, starting from the first nibble. 
+ let new_key = Key::from_big_endian(&[0xaa; 32]); + let new_kvs = vec![TreeEntry::new(new_key, 101, ValueHash::repeat_byte(0xaa))]; + tree.extend(new_kvs).unwrap(); + assert_eq!(tree.latest_version(), Some(1)); + } +} + #[cfg(test)] mod tests { use zksync_types::U256; diff --git a/core/lib/protobuf_config/src/experimental.rs b/core/lib/protobuf_config/src/experimental.rs index 750dc7b04f01..8dfbf413d5a1 100644 --- a/core/lib/protobuf_config/src/experimental.rs +++ b/core/lib/protobuf_config/src/experimental.rs @@ -30,13 +30,12 @@ impl ProtoRepr for proto::Db { .map(|count| NonZeroU32::new(count).context("cannot be 0")) .transpose() .context("state_keeper_db_max_open_files")?, - protective_reads_persistence_enabled: self - .reads_persistence_enabled - .unwrap_or_default(), + protective_reads_persistence_enabled: self.reads_persistence_enabled.unwrap_or(false), processing_delay_ms: self.processing_delay_ms.unwrap_or_default(), include_indices_and_filters_in_block_cache: self .include_indices_and_filters_in_block_cache - .unwrap_or_default(), + .unwrap_or(false), + merkle_tree_repair_stale_keys: self.merkle_tree_repair_stale_keys.unwrap_or(false), }) } @@ -55,6 +54,7 @@ impl ProtoRepr for proto::Db { include_indices_and_filters_in_block_cache: Some( this.include_indices_and_filters_in_block_cache, ), + merkle_tree_repair_stale_keys: Some(this.merkle_tree_repair_stale_keys), } } } diff --git a/core/lib/protobuf_config/src/proto/config/experimental.proto b/core/lib/protobuf_config/src/proto/config/experimental.proto index 87af8d3835c6..22de076ece27 100644 --- a/core/lib/protobuf_config/src/proto/config/experimental.proto +++ b/core/lib/protobuf_config/src/proto/config/experimental.proto @@ -10,7 +10,8 @@ message DB { optional uint32 state_keeper_db_max_open_files = 2; // optional optional bool reads_persistence_enabled = 3; optional uint64 processing_delay_ms = 4; - optional bool include_indices_and_filters_in_block_cache = 5; + optional bool include_indices_and_filters_in_block_cache = 5; // optional; defaults to false + optional bool merkle_tree_repair_stale_keys = 6; // optional; defaults to false } // Experimental part of the Snapshot recovery configuration. diff --git a/core/node/metadata_calculator/src/api_server/metrics.rs b/core/node/metadata_calculator/src/api_server/metrics.rs index 92f948e09702..a2444e639943 100644 --- a/core/node/metadata_calculator/src/api_server/metrics.rs +++ b/core/node/metadata_calculator/src/api_server/metrics.rs @@ -11,6 +11,7 @@ pub(super) enum MerkleTreeApiMethod { GetProofs, GetNodes, GetStaleKeys, + GetBogusStaleKeys, } /// Metrics for Merkle tree API. 
diff --git a/core/node/metadata_calculator/src/api_server/mod.rs b/core/node/metadata_calculator/src/api_server/mod.rs index ced29310408e..51f25c99ddef 100644 --- a/core/node/metadata_calculator/src/api_server/mod.rs +++ b/core/node/metadata_calculator/src/api_server/mod.rs @@ -486,6 +486,17 @@ impl AsyncTreeReader { Json(StaleKeysResponse { stale_keys }) } + async fn bogus_stale_keys_handler( + State(this): State, + Json(request): Json, + ) -> Json { + let latency = API_METRICS.latency[&MerkleTreeApiMethod::GetBogusStaleKeys].start(); + let stale_keys = this.clone().bogus_stale_keys(request.l1_batch_number).await; + let stale_keys = stale_keys.into_iter().map(HexNodeKey).collect(); + latency.observe(); + Json(StaleKeysResponse { stale_keys }) + } + async fn create_api_server( self, bind_address: &SocketAddr, @@ -501,6 +512,10 @@ impl AsyncTreeReader { "/debug/stale-keys", routing::post(Self::get_stale_keys_handler), ) + .route( + "/debug/stale-keys/bogus", + routing::post(Self::bogus_stale_keys_handler), + ) .with_state(self); let listener = tokio::net::TcpListener::bind(bind_address) diff --git a/core/node/metadata_calculator/src/api_server/tests.rs b/core/node/metadata_calculator/src/api_server/tests.rs index 815522a4cd8e..9bb994cb4163 100644 --- a/core/node/metadata_calculator/src/api_server/tests.rs +++ b/core/node/metadata_calculator/src/api_server/tests.rs @@ -96,6 +96,21 @@ async fn merkle_tree_api() { let raw_stale_keys_response: serde_json::Value = raw_stale_keys_response.json().await.unwrap(); assert_raw_stale_keys_response(&raw_stale_keys_response); + let raw_stale_keys_response = api_client + .inner + .post(format!("http://{local_addr}/debug/stale-keys/bogus")) + .json(&serde_json::json!({ "l1_batch_number": 1 })) + .send() + .await + .unwrap() + .error_for_status() + .unwrap(); + let raw_stale_keys_response: serde_json::Value = raw_stale_keys_response.json().await.unwrap(); + assert_eq!( + raw_stale_keys_response, + serde_json::json!({ "stale_keys": [] }) + ); + // Stop the calculator and the tree API server. stop_sender.send_replace(true); api_server_task.await.unwrap().unwrap(); diff --git a/core/node/metadata_calculator/src/helpers.rs b/core/node/metadata_calculator/src/helpers.rs index 3f370afaf77e..b8d02067f8ea 100644 --- a/core/node/metadata_calculator/src/helpers.rs +++ b/core/node/metadata_calculator/src/helpers.rs @@ -22,6 +22,7 @@ use zksync_health_check::{CheckHealth, Health, HealthStatus, ReactiveHealthCheck use zksync_merkle_tree::{ domain::{TreeMetadata, ZkSyncTree, ZkSyncTreeReader}, recovery::{MerkleTreeRecovery, PersistenceThreadHandle}, + repair::StaleKeysRepairTask, unstable::{NodeKey, RawNode}, Database, Key, MerkleTreeColumnFamily, NoVersionError, RocksDBWrapper, TreeEntry, TreeEntryWithProof, TreeInstruction, @@ -420,6 +421,19 @@ impl AsyncTreeReader { .await .unwrap() } + + pub(crate) async fn bogus_stale_keys(self, l1_batch_number: L1BatchNumber) -> Vec { + let version = l1_batch_number.0.into(); + tokio::task::spawn_blocking(move || { + StaleKeysRepairTask::bogus_stale_keys(self.inner.db(), version) + }) + .await + .unwrap() + } + + pub(crate) fn into_db(self) -> RocksDBWrapper { + self.inner.into_db() + } } /// Version of async tree reader that holds a weak reference to RocksDB. Used in [`MerkleTreeHealthCheck`]. 
diff --git a/core/node/metadata_calculator/src/lib.rs b/core/node/metadata_calculator/src/lib.rs index 5c64330a0e7d..dddb53b4c52f 100644 --- a/core/node/metadata_calculator/src/lib.rs +++ b/core/node/metadata_calculator/src/lib.rs @@ -26,6 +26,7 @@ use self::{ pub use self::{ helpers::{AsyncTreeReader, LazyAsyncTreeReader, MerkleTreeInfo}, pruning::MerkleTreePruningTask, + repair::StaleKeysRepairTask, }; use crate::helpers::create_readonly_db; @@ -34,6 +35,7 @@ mod helpers; mod metrics; mod pruning; mod recovery; +mod repair; #[cfg(test)] pub(crate) mod tests; mod updater; @@ -203,6 +205,11 @@ impl MetadataCalculator { MerkleTreePruningTask::new(pruning_handles, self.pool.clone(), poll_interval) } + /// This method should be called once. + pub fn stale_keys_repair_task(&self) -> StaleKeysRepairTask { + StaleKeysRepairTask::new(self.tree_reader()) + } + async fn create_tree(&self) -> anyhow::Result { self.health_updater .update(MerkleTreeHealth::Initialization.into()); diff --git a/core/node/metadata_calculator/src/repair.rs b/core/node/metadata_calculator/src/repair.rs new file mode 100644 index 000000000000..9dfec4348ed6 --- /dev/null +++ b/core/node/metadata_calculator/src/repair.rs @@ -0,0 +1,258 @@ +//! High-level wrapper for the stale keys repair task. + +use std::{ + sync::{Arc, Weak}, + time::Duration, +}; + +use anyhow::Context as _; +use async_trait::async_trait; +use once_cell::sync::OnceCell; +use serde::Serialize; +use tokio::sync::watch; +use zksync_health_check::{CheckHealth, Health, HealthStatus}; +use zksync_merkle_tree::repair; + +use crate::LazyAsyncTreeReader; + +#[derive(Debug, Serialize)] +struct RepairHealthDetails { + #[serde(skip_serializing_if = "Option::is_none")] + earliest_checked_version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + latest_checked_version: Option, + repaired_key_count: usize, +} + +impl From for RepairHealthDetails { + fn from(stats: repair::StaleKeysRepairStats) -> Self { + let versions = stats.checked_versions.as_ref(); + Self { + earliest_checked_version: versions.map(|versions| *versions.start()), + latest_checked_version: versions.map(|versions| *versions.end()), + repaired_key_count: stats.repaired_key_count, + } + } +} + +#[derive(Debug, Default)] +struct RepairHealthCheck { + handle: OnceCell>, +} + +#[async_trait] +impl CheckHealth for RepairHealthCheck { + fn name(&self) -> &'static str { + "tree_stale_keys_repair" + } + + async fn check_health(&self) -> Health { + let Some(weak_handle) = self.handle.get() else { + return HealthStatus::Affected.into(); + }; + let Some(handle) = weak_handle.upgrade() else { + return HealthStatus::ShutDown.into(); + }; + Health::from(HealthStatus::Ready).with_details(RepairHealthDetails::from(handle.stats())) + } +} + +/// Stale keys repair task. +#[derive(Debug)] +#[must_use = "Task should `run()` in a managed Tokio task"] +pub struct StaleKeysRepairTask { + tree_reader: LazyAsyncTreeReader, + health_check: Arc, + poll_interval: Duration, +} + +impl StaleKeysRepairTask { + pub(super) fn new(tree_reader: LazyAsyncTreeReader) -> Self { + Self { + tree_reader, + health_check: Arc::default(), + poll_interval: Duration::from_secs(60), + } + } + + pub fn health_check(&self) -> Arc { + self.health_check.clone() + } + + /// Runs this task indefinitely. + #[tracing::instrument(skip_all)] + pub async fn run(self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let db = tokio::select! 
{ + res = self.tree_reader.wait() => { + match res { + Some(reader) => reader.into_db(), + None => { + tracing::info!("Merkle tree dropped; shutting down stale keys repair"); + return Ok(()); + } + } + } + _ = stop_receiver.changed() => { + tracing::info!("Stop signal received before Merkle tree is initialized; shutting down stale keys repair"); + return Ok(()); + } + }; + + let (mut task, handle) = repair::StaleKeysRepairTask::new(db); + task.set_poll_interval(self.poll_interval); + let handle = Arc::new(handle); + self.health_check + .handle + .set(Arc::downgrade(&handle)) + .map_err(|_| anyhow::anyhow!("failed setting health check handle"))?; + + let mut task = tokio::task::spawn_blocking(|| task.run()); + tokio::select! { + res = &mut task => { + tracing::error!("Stale keys repair spontaneously stopped"); + res.context("repair task panicked")? + }, + _ = stop_receiver.changed() => { + tracing::info!("Stop signal received, stale keys repair is shutting down"); + // This is the only strong reference to the handle, so dropping it should signal the task to stop. + drop(handle); + task.await.context("stale keys repair task panicked")? + } + } + } +} + +#[cfg(test)] +mod tests { + use std::time::Duration; + + use tempfile::TempDir; + use zksync_dal::{ConnectionPool, Core}; + use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; + use zksync_types::L1BatchNumber; + + use super::*; + use crate::{ + tests::{extend_db_state, gen_storage_logs, mock_config, reset_db_state}, + MetadataCalculator, + }; + + const POLL_INTERVAL: Duration = Duration::from_millis(50); + + async fn wait_for_health( + check: &dyn CheckHealth, + mut condition: impl FnMut(&Health) -> bool, + ) -> Health { + loop { + let health = check.check_health().await; + if condition(&health) { + return health; + } else if matches!( + health.status(), + HealthStatus::ShutDown | HealthStatus::Panicked + ) { + panic!("reached terminal health: {health:?}"); + } + tokio::time::sleep(POLL_INTERVAL).await; + } + } + + #[tokio::test] + async fn repair_task_basics() { + let pool = ConnectionPool::::test_pool().await; + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let config = mock_config(temp_dir.path()); + let mut storage = pool.connection().await.unwrap(); + insert_genesis_batch(&mut storage, &GenesisParams::mock()) + .await + .unwrap(); + reset_db_state(&pool, 5).await; + + let calculator = MetadataCalculator::new(config, None, pool.clone()) + .await + .unwrap(); + let reader = calculator.tree_reader(); + let mut repair_task = calculator.stale_keys_repair_task(); + repair_task.poll_interval = POLL_INTERVAL; + let health_check = repair_task.health_check(); + + let (stop_sender, stop_receiver) = watch::channel(false); + let calculator_handle = tokio::spawn(calculator.run(stop_receiver.clone())); + let repair_task_handle = tokio::spawn(repair_task.run(stop_receiver)); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::Ready) + }) + .await; + + // Wait until the calculator is initialized and then drop the reader so that it doesn't lock RocksDB. + { + let reader = reader.wait().await.unwrap(); + while reader.clone().info().await.next_l1_batch_number < L1BatchNumber(6) { + tokio::time::sleep(POLL_INTERVAL).await; + } + } + + // Wait until all tree versions have been checked. 
+ let health = wait_for_health(&health_check, |health| { + if !matches!(health.status(), HealthStatus::Ready) { + return false; + } + let details = health.details().unwrap(); + details.get("latest_checked_version") == Some(&5.into()) + }) + .await; + let details = health.details().unwrap(); + assert_eq!(details["earliest_checked_version"], 1); + assert_eq!(details["repaired_key_count"], 0); + + stop_sender.send_replace(true); + calculator_handle.await.unwrap().unwrap(); + repair_task_handle.await.unwrap().unwrap(); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::ShutDown) + }) + .await; + + test_repair_persistence(temp_dir, pool).await; + } + + async fn test_repair_persistence(temp_dir: TempDir, pool: ConnectionPool) { + let config = mock_config(temp_dir.path()); + let calculator = MetadataCalculator::new(config, None, pool.clone()) + .await + .unwrap(); + let mut repair_task = calculator.stale_keys_repair_task(); + repair_task.poll_interval = POLL_INTERVAL; + let health_check = repair_task.health_check(); + + let (stop_sender, stop_receiver) = watch::channel(false); + let calculator_handle = tokio::spawn(calculator.run(stop_receiver.clone())); + let repair_task_handle = tokio::spawn(repair_task.run(stop_receiver)); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::Ready) + }) + .await; + + // Add more batches to the storage. + let mut storage = pool.connection().await.unwrap(); + let logs = gen_storage_logs(200..300, 5); + extend_db_state(&mut storage, logs).await; + + // Wait until new tree versions have been checked. + let health = wait_for_health(&health_check, |health| { + if !matches!(health.status(), HealthStatus::Ready) { + return false; + } + let details = health.details().unwrap(); + details.get("latest_checked_version") == Some(&10.into()) + }) + .await; + let details = health.details().unwrap(); + assert_eq!(details["earliest_checked_version"], 6); + assert_eq!(details["repaired_key_count"], 0); + + stop_sender.send_replace(true); + calculator_handle.await.unwrap().unwrap(); + repair_task_handle.await.unwrap().unwrap(); + } +} diff --git a/core/node/node_framework/src/implementations/layers/metadata_calculator.rs b/core/node/node_framework/src/implementations/layers/metadata_calculator.rs index 4092ee6dcd56..45aa320786ef 100644 --- a/core/node/node_framework/src/implementations/layers/metadata_calculator.rs +++ b/core/node/node_framework/src/implementations/layers/metadata_calculator.rs @@ -8,7 +8,7 @@ use anyhow::Context as _; use zksync_config::configs::{api::MerkleTreeApiConfig, database::MerkleTreeMode}; use zksync_metadata_calculator::{ LazyAsyncTreeReader, MerkleTreePruningTask, MerkleTreeReaderConfig, MetadataCalculator, - MetadataCalculatorConfig, TreeReaderTask, + MetadataCalculatorConfig, StaleKeysRepairTask, TreeReaderTask, }; use zksync_storage::RocksDB; @@ -31,6 +31,7 @@ pub struct MetadataCalculatorLayer { config: MetadataCalculatorConfig, tree_api_config: Option, pruning_config: Option, + stale_keys_repair_enabled: bool, } #[derive(Debug, FromContext)] @@ -56,6 +57,9 @@ pub struct Output { /// Only provided if configuration is provided. #[context(task)] pub pruning_task: Option, + /// Only provided if enabled in the config. 
+ #[context(task)] + pub stale_keys_repair_task: Option, pub rocksdb_shutdown_hook: ShutdownHook, } @@ -65,6 +69,7 @@ impl MetadataCalculatorLayer { config, tree_api_config: None, pruning_config: None, + stale_keys_repair_enabled: false, } } @@ -77,6 +82,11 @@ impl MetadataCalculatorLayer { self.pruning_config = Some(pruning_config); self } + + pub fn with_stale_keys_repair(mut self) -> Self { + self.stale_keys_repair_enabled = true; + self + } } #[async_trait::async_trait] @@ -141,6 +151,12 @@ impl WiringLayer for MetadataCalculatorLayer { ) .transpose()?; + let stale_keys_repair_task = if self.stale_keys_repair_enabled { + Some(metadata_calculator.stale_keys_repair_task()) + } else { + None + }; + let tree_api_client = TreeApiClientResource(Arc::new(metadata_calculator.tree_reader())); let rocksdb_shutdown_hook = ShutdownHook::new("rocksdb_terminaton", async { @@ -155,6 +171,7 @@ impl WiringLayer for MetadataCalculatorLayer { tree_api_client, tree_api_task, pruning_task, + stale_keys_repair_task, rocksdb_shutdown_hook, }) } @@ -196,6 +213,17 @@ impl Task for TreeApiTask { } } +#[async_trait::async_trait] +impl Task for StaleKeysRepairTask { + fn id(&self) -> TaskId { + "merkle_tree_stale_keys_repair_task".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} + #[async_trait::async_trait] impl Task for MerkleTreePruningTask { fn id(&self) -> TaskId { From 75f7db9b535b4dee4c6662be609aec996555383c Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Tue, 12 Nov 2024 12:01:28 +0200 Subject: [PATCH 18/23] feat(contract-verifier): Support Vyper toolchain for EVM bytecodes (#3251) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Supports the Vyper toolchain for EVM bytecodes in the contract verifier. - Adds some more unit tests for the verifier (e.g., testing multi-file inputs, Yul contracts, abstract contract errors etc.). ## Why ❔ Part of preparations to support EVM bytecodes throughout the codebase. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
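
As a quick illustration for reviewers (not part of the diff; the file name and contract source below are made up), the standard JSON that the new `VyperInput::take_standard_json` helper pipes into `vyper --standard-json` looks roughly like this for a single-file request with optimizations disabled:

```rust
// Rough sketch of the payload assembled in `compilers/vyper.rs`; the contract
// source and file name are placeholders, not taken from a real request.
fn example_vyper_standard_json() -> serde_json::Value {
    serde_json::json!({
        "language": "Vyper",
        "sources": {
            "Counter.vy": { "content": "value: uint256\n" }
        },
        "settings": {
            "outputSelection": { "*": ["abi", "evm.bytecode", "evm.deployedBytecode"] },
            // `gas` is vyper's default optimization mode, so `none` is spelled out
            // explicitly whenever the verification request disables optimizations.
            "optimize": "none"
        }
    })
}
```

The compiler's stdout is then parsed by the same `parse_standard_json_output` helper used for `solc` / `zksolc`; the only Vyper-specific quirk handled there is the optional `0x` prefix on bytecode strings.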
--- Cargo.lock | 1 + core/lib/contract_verifier/Cargo.toml | 2 + .../contract_verifier/src/compilers/mod.rs | 84 ++++- .../contract_verifier/src/compilers/solc.rs | 33 +- .../contract_verifier/src/compilers/vyper.rs | 114 ++++++ .../contract_verifier/src/compilers/zksolc.rs | 14 +- .../src/compilers/zkvyper.rs | 135 ++++--- core/lib/contract_verifier/src/lib.rs | 25 +- core/lib/contract_verifier/src/resolver.rs | 22 +- core/lib/contract_verifier/src/tests/mod.rs | 70 +++- core/lib/contract_verifier/src/tests/real.rs | 350 +++++++++++++++++- .../types/src/contract_verification_api.rs | 2 + 12 files changed, 720 insertions(+), 132 deletions(-) create mode 100644 core/lib/contract_verifier/src/compilers/vyper.rs diff --git a/Cargo.lock b/Cargo.lock index bdd2f84527b8..eb93300b1729 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10817,6 +10817,7 @@ name = "zksync_contract_verifier_lib" version = "0.1.0" dependencies = [ "anyhow", + "assert_matches", "chrono", "ethabi", "hex", diff --git a/core/lib/contract_verifier/Cargo.toml b/core/lib/contract_verifier/Cargo.toml index bdbfa90bf76a..6ccd6422d7da 100644 --- a/core/lib/contract_verifier/Cargo.toml +++ b/core/lib/contract_verifier/Cargo.toml @@ -34,4 +34,6 @@ semver.workspace = true [dev-dependencies] zksync_node_test_utils.workspace = true zksync_vm_interface.workspace = true + +assert_matches.workspace = true test-casing.workspace = true diff --git a/core/lib/contract_verifier/src/compilers/mod.rs b/core/lib/contract_verifier/src/compilers/mod.rs index a56b4e32d1a1..c82a6575ee4c 100644 --- a/core/lib/contract_verifier/src/compilers/mod.rs +++ b/core/lib/contract_verifier/src/compilers/mod.rs @@ -1,18 +1,50 @@ +use std::collections::HashMap; + use anyhow::Context as _; use serde::{Deserialize, Serialize}; use zksync_types::contract_verification_api::CompilationArtifacts; pub(crate) use self::{ solc::{Solc, SolcInput}, + vyper::{Vyper, VyperInput}, zksolc::{ZkSolc, ZkSolcInput}, - zkvyper::{ZkVyper, ZkVyperInput}, + zkvyper::ZkVyper, }; use crate::error::ContractVerifierError; mod solc; +mod vyper; mod zksolc; mod zkvyper; +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct StandardJson { + pub language: String, + pub sources: HashMap, + #[serde(default)] + settings: Settings, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct Settings { + /// The output selection filters. + output_selection: Option, + /// Other settings (only filled when parsing `StandardJson` input from the request). + #[serde(flatten)] + other: serde_json::Value, +} + +impl Default for Settings { + fn default() -> Self { + Self { + output_selection: None, + other: serde_json::json!({}), + } + } +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Source { @@ -20,6 +52,18 @@ pub(crate) struct Source { pub content: String, } +/// Users may provide either just contract name or source file name and contract name joined with ":". +fn process_contract_name(original_name: &str, extension: &str) -> (String, String) { + if let Some((file_name, contract_name)) = original_name.rsplit_once(':') { + (file_name.to_owned(), contract_name.to_owned()) + } else { + ( + format!("{original_name}.{extension}"), + original_name.to_owned(), + ) + } +} + /// Parsing logic shared between `solc` and `zksolc`. 
fn parse_standard_json_output( output: &serde_json::Value, @@ -31,11 +75,16 @@ fn parse_standard_json_output( let errors = errors.as_array().unwrap().clone(); if errors .iter() - .any(|err| err["severity"].as_str().unwrap() == "error") + .any(|err| err["severity"].as_str() == Some("error")) { let error_messages = errors .into_iter() - .map(|err| err["formattedMessage"].clone()) + .filter_map(|err| { + // `formattedMessage` is an optional field + err.get("formattedMessage") + .or_else(|| err.get("message")) + .cloned() + }) .collect(); return Err(ContractVerifierError::CompilationError( serde_json::Value::Array(error_messages), @@ -50,28 +99,35 @@ fn parse_standard_json_output( return Err(ContractVerifierError::MissingContract(contract_name)); }; - let Some(bytecode_str) = contract - .pointer("/evm/bytecode/object") - .context("missing bytecode in solc / zksolc output")? - .as_str() - else { + let Some(bytecode_str) = contract.pointer("/evm/bytecode/object") else { return Err(ContractVerifierError::AbstractContract(contract_name)); }; + let bytecode_str = bytecode_str + .as_str() + .context("unexpected `/evm/bytecode/object` value")?; + // Strip an optional `0x` prefix (output by `vyper`, but not by `solc` / `zksolc`) + let bytecode_str = bytecode_str.strip_prefix("0x").unwrap_or(bytecode_str); let bytecode = hex::decode(bytecode_str).context("invalid bytecode")?; let deployed_bytecode = if get_deployed_bytecode { - let bytecode_str = contract - .pointer("/evm/deployedBytecode/object") - .context("missing deployed bytecode in solc output")? + let Some(bytecode_str) = contract.pointer("/evm/deployedBytecode/object") else { + return Err(ContractVerifierError::AbstractContract(contract_name)); + }; + let bytecode_str = bytecode_str .as_str() - .ok_or(ContractVerifierError::AbstractContract(contract_name))?; + .context("unexpected `/evm/deployedBytecode/object` value")?; + let bytecode_str = bytecode_str.strip_prefix("0x").unwrap_or(bytecode_str); Some(hex::decode(bytecode_str).context("invalid deployed bytecode")?) } else { None }; - let abi = contract["abi"].clone(); - if !abi.is_array() { + let mut abi = contract["abi"].clone(); + if abi.is_null() { + // ABI is undefined for Yul contracts when compiled with standalone `solc`. For uniformity with `zksolc`, + // replace it with an empty array. + abi = serde_json::json!([]); + } else if !abi.is_array() { let err = anyhow::anyhow!( "unexpected value for ABI: {}", serde_json::to_string_pretty(&abi).unwrap() diff --git a/core/lib/contract_verifier/src/compilers/solc.rs b/core/lib/contract_verifier/src/compilers/solc.rs index bb453cb729c2..10adcad3542e 100644 --- a/core/lib/contract_verifier/src/compilers/solc.rs +++ b/core/lib/contract_verifier/src/compilers/solc.rs @@ -1,14 +1,13 @@ use std::{collections::HashMap, path::PathBuf, process::Stdio}; use anyhow::Context; -use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; use zksync_queued_job_processor::async_trait; use zksync_types::contract_verification_api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; -use super::{parse_standard_json_output, Source}; +use super::{parse_standard_json_output, process_contract_name, Settings, Source, StandardJson}; use crate::{error::ContractVerifierError, resolver::Compiler}; // Here and below, fields are public for testing purposes. 
@@ -19,24 +18,6 @@ pub(crate) struct SolcInput { pub file_name: String, } -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct StandardJson { - pub language: String, - pub sources: HashMap, - settings: Settings, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -struct Settings { - /// The output selection filters. - output_selection: Option, - /// Other settings (only filled when parsing `StandardJson` input from the request). - #[serde(flatten)] - other: serde_json::Value, -} - #[derive(Debug)] pub(crate) struct Solc { path: PathBuf, @@ -50,17 +31,7 @@ impl Solc { pub fn build_input( req: VerificationIncomingRequest, ) -> Result { - // Users may provide either just contract name or - // source file name and contract name joined with ":". - let (file_name, contract_name) = - if let Some((file_name, contract_name)) = req.contract_name.rsplit_once(':') { - (file_name.to_string(), contract_name.to_string()) - } else { - ( - format!("{}.sol", req.contract_name), - req.contract_name.clone(), - ) - }; + let (file_name, contract_name) = process_contract_name(&req.contract_name, "sol"); let default_output_selection = serde_json::json!({ "*": { "*": [ "abi", "evm.bytecode", "evm.deployedBytecode" ], diff --git a/core/lib/contract_verifier/src/compilers/vyper.rs b/core/lib/contract_verifier/src/compilers/vyper.rs new file mode 100644 index 000000000000..59b950f9f17f --- /dev/null +++ b/core/lib/contract_verifier/src/compilers/vyper.rs @@ -0,0 +1,114 @@ +use std::{collections::HashMap, mem, path::PathBuf, process::Stdio}; + +use anyhow::Context; +use tokio::io::AsyncWriteExt; +use zksync_queued_job_processor::async_trait; +use zksync_types::contract_verification_api::{ + CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, +}; + +use super::{parse_standard_json_output, process_contract_name, Settings, Source, StandardJson}; +use crate::{error::ContractVerifierError, resolver::Compiler}; + +#[derive(Debug)] +pub(crate) struct VyperInput { + pub contract_name: String, + pub file_name: String, + pub sources: HashMap, + pub optimizer_mode: Option, +} + +impl VyperInput { + pub fn new(req: VerificationIncomingRequest) -> Result { + let (file_name, contract_name) = process_contract_name(&req.contract_name, "vy"); + + let sources = match req.source_code_data { + SourceCodeData::VyperMultiFile(s) => s, + other => unreachable!("unexpected `SourceCodeData` variant: {other:?}"), + }; + Ok(Self { + contract_name, + file_name, + sources, + optimizer_mode: if req.optimization_used { + req.optimizer_mode + } else { + // `none` mode is not the default mode (which is `gas`), so we must specify it explicitly here + Some("none".to_owned()) + }, + }) + } + + fn take_standard_json(&mut self) -> StandardJson { + let sources = mem::take(&mut self.sources); + let sources = sources + .into_iter() + .map(|(name, content)| (name, Source { content })); + + StandardJson { + language: "Vyper".to_owned(), + sources: sources.collect(), + settings: Settings { + output_selection: Some(serde_json::json!({ + "*": [ "abi", "evm.bytecode", "evm.deployedBytecode" ], + })), + other: serde_json::json!({ + "optimize": self.optimizer_mode.as_deref(), + }), + }, + } + } +} + +#[derive(Debug)] +pub(crate) struct Vyper { + path: PathBuf, +} + +impl Vyper { + pub fn new(path: PathBuf) -> Self { + Self { path } + } +} + +#[async_trait] +impl Compiler for Vyper { + async fn compile( + self: Box, + mut input: VyperInput, + ) -> Result { + let mut command = 
tokio::process::Command::new(&self.path); + let mut child = command + .arg("--standard-json") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context("cannot spawn vyper")?; + let mut stdin = child.stdin.take().unwrap(); + let standard_json = input.take_standard_json(); + let content = serde_json::to_vec(&standard_json) + .context("cannot encode standard JSON input for vyper")?; + stdin + .write_all(&content) + .await + .context("failed writing standard JSON to vyper stdin")?; + stdin + .flush() + .await + .context("failed flushing standard JSON to vyper")?; + drop(stdin); + + let output = child.wait_with_output().await.context("vyper failed")?; + if output.status.success() { + let output = + serde_json::from_slice(&output.stdout).context("vyper output is not valid JSON")?; + parse_standard_json_output(&output, input.contract_name, input.file_name, true) + } else { + Err(ContractVerifierError::CompilerError( + "vyper", + String::from_utf8_lossy(&output.stderr).to_string(), + )) + } + } +} diff --git a/core/lib/contract_verifier/src/compilers/zksolc.rs b/core/lib/contract_verifier/src/compilers/zksolc.rs index 0d6b5828e31c..ff435e96aeb6 100644 --- a/core/lib/contract_verifier/src/compilers/zksolc.rs +++ b/core/lib/contract_verifier/src/compilers/zksolc.rs @@ -10,7 +10,7 @@ use zksync_types::contract_verification_api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; -use super::{parse_standard_json_output, Source}; +use super::{parse_standard_json_output, process_contract_name, Source}; use crate::{ error::ContractVerifierError, resolver::{Compiler, CompilerPaths}, @@ -85,17 +85,7 @@ impl ZkSolc { pub fn build_input( req: VerificationIncomingRequest, ) -> Result { - // Users may provide either just contract name or - // source file name and contract name joined with ":". 
- let (file_name, contract_name) = - if let Some((file_name, contract_name)) = req.contract_name.rsplit_once(':') { - (file_name.to_string(), contract_name.to_string()) - } else { - ( - format!("{}.sol", req.contract_name), - req.contract_name.clone(), - ) - }; + let (file_name, contract_name) = process_contract_name(&req.contract_name, "sol"); let default_output_selection = serde_json::json!({ "*": { "*": [ "abi" ], diff --git a/core/lib/contract_verifier/src/compilers/zkvyper.rs b/core/lib/contract_verifier/src/compilers/zkvyper.rs index b3dacce64e77..4f7c10214f8a 100644 --- a/core/lib/contract_verifier/src/compilers/zkvyper.rs +++ b/core/lib/contract_verifier/src/compilers/zkvyper.rs @@ -1,21 +1,54 @@ -use std::{collections::HashMap, fs::File, io::Write, path::Path, process::Stdio}; +use std::{ffi::OsString, path, path::Path, process::Stdio}; use anyhow::Context as _; +use tokio::{fs, io::AsyncWriteExt}; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{ - CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, -}; +use zksync_types::contract_verification_api::CompilationArtifacts; +use super::VyperInput; use crate::{ error::ContractVerifierError, resolver::{Compiler, CompilerPaths}, }; -#[derive(Debug)] -pub(crate) struct ZkVyperInput { - pub contract_name: String, - pub sources: HashMap, - pub optimizer_mode: Option, +impl VyperInput { + async fn write_files(&self, root_dir: &Path) -> anyhow::Result> { + let mut paths = Vec::with_capacity(self.sources.len()); + for (name, content) in &self.sources { + let mut name = name.clone(); + if !name.ends_with(".vy") { + name += ".vy"; + } + + let name_path = Path::new(&name); + anyhow::ensure!( + !name_path.is_absolute(), + "absolute contract filename: {name}" + ); + let normal_components = name_path + .components() + .all(|component| matches!(component, path::Component::Normal(_))); + anyhow::ensure!( + normal_components, + "contract filename contains disallowed components: {name}" + ); + + let path = root_dir.join(name_path); + if let Some(prefix) = path.parent() { + fs::create_dir_all(prefix) + .await + .with_context(|| format!("failed creating parent dir for `{name}`"))?; + } + let mut file = fs::File::create(&path) + .await + .with_context(|| format!("failed creating file for `{name}`"))?; + file.write_all(content.as_bytes()) + .await + .with_context(|| format!("failed writing to `{name}`"))?; + paths.push(path.into_os_string()); + } + Ok(paths) + } } #[derive(Debug)] @@ -28,28 +61,6 @@ impl ZkVyper { Self { paths } } - pub fn build_input( - req: VerificationIncomingRequest, - ) -> Result { - // Users may provide either just contract name or - // source file name and contract name joined with ":". 
- let contract_name = if let Some((_, contract_name)) = req.contract_name.rsplit_once(':') { - contract_name.to_owned() - } else { - req.contract_name.clone() - }; - - let sources = match req.source_code_data { - SourceCodeData::VyperMultiFile(s) => s, - other => unreachable!("unexpected `SourceCodeData` variant: {other:?}"), - }; - Ok(ZkVyperInput { - contract_name, - sources, - optimizer_mode: req.optimizer_mode, - }) - } - fn parse_output( output: &serde_json::Value, contract_name: String, @@ -80,10 +91,10 @@ impl ZkVyper { } #[async_trait] -impl Compiler for ZkVyper { +impl Compiler for ZkVyper { async fn compile( self: Box, - input: ZkVyperInput, + input: VyperInput, ) -> Result { let mut command = tokio::process::Command::new(&self.paths.zk); if let Some(o) = input.optimizer_mode.as_ref() { @@ -97,22 +108,15 @@ impl Compiler for ZkVyper { .stdout(Stdio::piped()) .stderr(Stdio::piped()); - let temp_dir = tempfile::tempdir().context("failed creating temporary dir")?; - for (mut name, content) in input.sources { - if !name.ends_with(".vy") { - name += ".vy"; - } - let path = temp_dir.path().join(&name); - if let Some(prefix) = path.parent() { - std::fs::create_dir_all(prefix) - .with_context(|| format!("failed creating parent dir for `{name}`"))?; - } - let mut file = File::create(&path) - .with_context(|| format!("failed creating file for `{name}`"))?; - file.write_all(content.as_bytes()) - .with_context(|| format!("failed writing to `{name}`"))?; - command.arg(path.into_os_string()); - } + let temp_dir = tokio::task::spawn_blocking(tempfile::tempdir) + .await + .context("panicked creating temporary dir")? + .context("failed creating temporary dir")?; + let file_paths = input + .write_files(temp_dir.path()) + .await + .context("failed writing Vyper files to temp dir")?; + command.args(file_paths); let child = command.spawn().context("cannot spawn zkvyper")?; let output = child.wait_with_output().await.context("zkvyper failed")?; @@ -128,3 +132,36 @@ impl Compiler for ZkVyper { } } } + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + + #[tokio::test] + async fn sanitizing_contract_paths() { + let mut input = VyperInput { + contract_name: "Test".to_owned(), + file_name: "test.vy".to_owned(), + sources: HashMap::from([("/etc/shadow".to_owned(), String::new())]), + optimizer_mode: None, + }; + + let temp_dir = tempfile::TempDir::new().unwrap(); + let err = input + .write_files(temp_dir.path()) + .await + .unwrap_err() + .to_string(); + assert!(err.contains("absolute"), "{err}"); + + input.sources = HashMap::from([("../../../etc/shadow".to_owned(), String::new())]); + let err = input + .write_files(temp_dir.path()) + .await + .unwrap_err() + .to_string(); + assert!(err.contains("disallowed components"), "{err}"); + } +} diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 686bb0d7bdc3..e8bf05c72e81 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -22,7 +22,7 @@ use zksync_types::{ use zksync_utils::bytecode::{prepare_evm_bytecode, BytecodeMarker}; use crate::{ - compilers::{Solc, ZkSolc, ZkVyper}, + compilers::{Solc, VyperInput, ZkSolc}, error::ContractVerifierError, metrics::API_CONTRACT_VERIFIER_METRICS, resolver::{CompilerResolver, EnvCompilerResolver}, @@ -47,7 +47,6 @@ struct ZkCompilerVersions { #[derive(Debug)] enum VersionedCompiler { Solc(String), - #[allow(dead_code)] // TODO (EVM-864): add vyper support Vyper(String), ZkSolc(ZkCompilerVersions), 
ZkVyper(ZkCompilerVersions), @@ -292,7 +291,7 @@ impl ContractVerifier { ) -> Result { let zkvyper = self.compiler_resolver.resolve_zkvyper(version).await?; tracing::debug!(?zkvyper, ?version, "resolved compiler"); - let input = ZkVyper::build_input(req)?; + let input = VyperInput::new(req)?; time::timeout(self.compilation_timeout, zkvyper.compile(input)) .await .map_err(|_| ContractVerifierError::CompilationTimeout)? @@ -312,6 +311,20 @@ impl ContractVerifier { .map_err(|_| ContractVerifierError::CompilationTimeout)? } + async fn compile_vyper( + &self, + version: &str, + req: VerificationIncomingRequest, + ) -> Result { + let vyper = self.compiler_resolver.resolve_vyper(version).await?; + tracing::debug!(?vyper, ?req.compiler_versions, "resolved compiler"); + let input = VyperInput::new(req)?; + + time::timeout(self.compilation_timeout, vyper.compile(input)) + .await + .map_err(|_| ContractVerifierError::CompilationTimeout)? + } + #[tracing::instrument(level = "debug", skip_all)] async fn compile( &self, @@ -340,11 +353,7 @@ impl ContractVerifier { match &compiler { VersionedCompiler::Solc(version) => self.compile_solc(version, req).await, - VersionedCompiler::Vyper(_) => { - // TODO (EVM-864): add vyper support - let err = anyhow::anyhow!("vyper toolchain is not yet supported for EVM contracts"); - return Err(err.into()); - } + VersionedCompiler::Vyper(version) => self.compile_vyper(version, req).await, VersionedCompiler::ZkSolc(version) => self.compile_zksolc(version, req).await, VersionedCompiler::ZkVyper(version) => self.compile_zkvyper(version, req).await, } diff --git a/core/lib/contract_verifier/src/resolver.rs b/core/lib/contract_verifier/src/resolver.rs index 34a70b759797..018da12a152a 100644 --- a/core/lib/contract_verifier/src/resolver.rs +++ b/core/lib/contract_verifier/src/resolver.rs @@ -10,7 +10,7 @@ use zksync_types::contract_verification_api::CompilationArtifacts; use zksync_utils::env::Workspace; use crate::{ - compilers::{Solc, SolcInput, ZkSolc, ZkSolcInput, ZkVyper, ZkVyperInput}, + compilers::{Solc, SolcInput, Vyper, VyperInput, ZkSolc, ZkSolcInput, ZkVyper}, error::ContractVerifierError, ZkCompilerVersions, }; @@ -115,11 +115,17 @@ pub(crate) trait CompilerResolver: fmt::Debug + Send + Sync { version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError>; + /// Resolves a `vyper` compiler. + async fn resolve_vyper( + &self, + version: &str, + ) -> Result>, ContractVerifierError>; + /// Resolves a `zkvyper` compiler. async fn resolve_zkvyper( &self, version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError>; + ) -> Result>, ContractVerifierError>; } /// Encapsulates a one-off compilation process. 
@@ -218,10 +224,20 @@ impl CompilerResolver for EnvCompilerResolver { ))) } + async fn resolve_vyper( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + let vyper_path = CompilerType::Vyper + .bin_path(&self.home_dir, version) + .await?; + Ok(Box::new(Vyper::new(vyper_path))) + } + async fn resolve_zkvyper( &self, version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError> { + ) -> Result>, ContractVerifierError> { let zkvyper_path = CompilerType::ZkVyper .bin_path(&self.home_dir, &version.zk) .await?; diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index 15951e578ff0..2aad39a12e0e 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -20,7 +20,7 @@ use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics, use super::*; use crate::{ - compilers::{SolcInput, ZkSolcInput, ZkVyperInput}, + compilers::{SolcInput, VyperInput, ZkSolcInput}, resolver::{Compiler, SupportedCompilerVersions}, }; @@ -53,6 +53,39 @@ const COUNTER_CONTRACT_WITH_CONSTRUCTOR: &str = r#" } } "#; +const COUNTER_CONTRACT_WITH_INTERFACE: &str = r#" + interface ICounter { + function increment(uint256 x) external; + } + + contract Counter is ICounter { + uint256 value; + + function increment(uint256 x) external override { + value += x; + } + } +"#; +const COUNTER_VYPER_CONTRACT: &str = r#" +#pragma version ^0.3.10 + +value: uint256 + +@external +def increment(x: uint256): + self.value += x +"#; +const EMPTY_YUL_CONTRACT: &str = r#" +object "Empty" { + code { + mstore(0, 0) + return(0, 32) + } + object "Empty_deployed" { + code { } + } +} +"#; #[derive(Debug, Clone, Copy)] enum TestContract { @@ -122,7 +155,7 @@ async fn mock_evm_deployment( calldata.extend_from_slice(ðabi::encode(constructor_args)); let deployment = Execute { contract_address: None, - calldata, // FIXME: check + calldata, value: 0.into(), factory_deps: vec![], }; @@ -295,10 +328,17 @@ impl CompilerResolver for MockCompilerResolver { Ok(Box::new(self.clone())) } + async fn resolve_vyper( + &self, + _version: &str, + ) -> Result>, ContractVerifierError> { + unreachable!("not tested") + } + async fn resolve_zkvyper( &self, _version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError> { + ) -> Result>, ContractVerifierError> { unreachable!("not tested") } } @@ -443,10 +483,32 @@ async fn assert_request_success( .unwrap() .expect("no verification info"); assert_eq!(verification_info.artifacts.bytecode, *expected_bytecode); - assert_eq!(verification_info.artifacts.abi, counter_contract_abi()); + assert_eq!( + without_internal_types(verification_info.artifacts.abi.clone()), + without_internal_types(counter_contract_abi()) + ); verification_info } +fn without_internal_types(mut abi: serde_json::Value) -> serde_json::Value { + let items = abi.as_array_mut().unwrap(); + for item in items { + if let Some(inputs) = item.get_mut("inputs") { + let inputs = inputs.as_array_mut().unwrap(); + for input in inputs { + input.as_object_mut().unwrap().remove("internalType"); + } + } + if let Some(outputs) = item.get_mut("outputs") { + let outputs = outputs.as_array_mut().unwrap(); + for output in outputs { + output.as_object_mut().unwrap().remove("internalType"); + } + } + } + abi +} + #[test_casing(2, TestContract::ALL)] #[tokio::test] async fn verifying_evm_bytecode(contract: TestContract) { diff --git a/core/lib/contract_verifier/src/tests/real.rs b/core/lib/contract_verifier/src/tests/real.rs index 
a7113044b405..4282e6de4ef8 100644 --- a/core/lib/contract_verifier/src/tests/real.rs +++ b/core/lib/contract_verifier/src/tests/real.rs @@ -4,14 +4,27 @@ use std::{env, sync::Arc, time::Duration}; +use assert_matches::assert_matches; use zksync_utils::bytecode::validate_bytecode; use super::*; +#[derive(Debug, Clone, Copy)] +enum Toolchain { + Solidity, + Vyper, +} + +impl Toolchain { + const ALL: [Self; 2] = [Self::Solidity, Self::Vyper]; +} + #[derive(Debug, Clone)] struct TestCompilerVersions { solc: String, zksolc: String, + vyper: String, + zkvyper: String, } impl TestCompilerVersions { @@ -23,6 +36,8 @@ impl TestCompilerVersions { Some(Self { solc, zksolc: versions.zksolc.pop()?, + vyper: versions.vyper.pop()?, + zkvyper: versions.zkvyper.pop()?, }) } @@ -42,6 +57,23 @@ impl TestCompilerVersions { }, } } + + fn zkvyper(self) -> ZkCompilerVersions { + ZkCompilerVersions { + base: self.vyper, + zk: self.zkvyper, + } + } + + fn vyper_for_api(self, bytecode_kind: BytecodeMarker) -> CompilerVersions { + CompilerVersions::Vyper { + compiler_vyper_version: self.vyper, + compiler_zkvyper_version: match bytecode_kind { + BytecodeMarker::Evm => None, + BytecodeMarker::EraVm => Some(self.zkvyper), + }, + } + } } async fn checked_env_resolver() -> Option<(EnvCompilerResolver, TestCompilerVersions)> { @@ -76,18 +108,23 @@ macro_rules! real_resolver { }; } +#[test_casing(2, [false, true])] #[tokio::test] -async fn using_real_compiler() { +async fn using_real_zksolc(specify_contract_file: bool) { let (compiler_resolver, supported_compilers) = real_resolver!(); let compiler = compiler_resolver .resolve_zksolc(&supported_compilers.clone().zksolc()) .await .unwrap(); - let req = VerificationIncomingRequest { + let mut req = VerificationIncomingRequest { compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; + if specify_contract_file { + set_multi_file_solc_input(&mut req); + } + let input = ZkSolc::build_input(req).unwrap(); let output = compiler.compile(input).await.unwrap(); @@ -95,19 +132,43 @@ async fn using_real_compiler() { assert_eq!(output.abi, counter_contract_abi()); } +fn set_multi_file_solc_input(req: &mut VerificationIncomingRequest) { + let input = serde_json::json!({ + "language": "Solidity", + "sources": { + "contracts/test.sol": { + "content": COUNTER_CONTRACT, + }, + }, + "settings": { + "optimizer": { "enabled": true }, + }, + }); + let serde_json::Value::Object(input) = input else { + unreachable!(); + }; + req.source_code_data = SourceCodeData::StandardJsonInput(input); + req.contract_name = "contracts/test.sol:Counter".to_owned(); +} + +#[test_casing(2, [false, true])] #[tokio::test] -async fn using_standalone_solc() { +async fn using_standalone_solc(specify_contract_file: bool) { let (compiler_resolver, supported_compilers) = real_resolver!(); let version = &supported_compilers.solc; let compiler = compiler_resolver.resolve_solc(version).await.unwrap(); - let req = VerificationIncomingRequest { + let mut req = VerificationIncomingRequest { compiler_versions: CompilerVersions::Solc { compiler_solc_version: version.clone(), compiler_zksolc_version: None, }, ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; + if specify_contract_file { + set_multi_file_solc_input(&mut req); + } + let input = Solc::build_input(req).unwrap(); let output = compiler.compile(input).await.unwrap(); @@ -115,18 +176,271 @@ async fn using_standalone_solc() { assert_eq!(output.abi, counter_contract_abi()); } 
-#[test_casing(2, BYTECODE_KINDS)] +#[test_casing(2, [false, true])] #[tokio::test] -async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { +async fn using_zksolc_with_abstract_contract(specify_contract_file: bool) { let (compiler_resolver, supported_compilers) = real_resolver!(); + let compiler = compiler_resolver + .resolve_zksolc(&supported_compilers.clone().zksolc()) + .await + .unwrap(); + let (source_code_data, contract_name) = if specify_contract_file { + let input = serde_json::json!({ + "language": "Solidity", + "sources": { + "contracts/test.sol": { + "content": COUNTER_CONTRACT_WITH_INTERFACE, + }, + }, + "settings": { + "optimizer": { "enabled": true }, + }, + }); + let serde_json::Value::Object(input) = input else { + unreachable!(); + }; + ( + SourceCodeData::StandardJsonInput(input), + "contracts/test.sol:ICounter", + ) + } else { + ( + SourceCodeData::SolSingleFile(COUNTER_CONTRACT_WITH_INTERFACE.to_owned()), + "ICounter", + ) + }; + let req = VerificationIncomingRequest { - compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind), - ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) + contract_address: Address::repeat_byte(1), + compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm), + optimization_used: true, + optimizer_mode: None, + constructor_arguments: Default::default(), + is_system: false, + source_code_data, + contract_name: contract_name.to_owned(), + force_evmla: false, + }; + + let input = ZkSolc::build_input(req).unwrap(); + let err = compiler.compile(input).await.unwrap_err(); + assert_matches!( + err, + ContractVerifierError::AbstractContract(name) if name == "ICounter" + ); +} + +fn test_yul_request(compiler_versions: CompilerVersions) -> VerificationIncomingRequest { + VerificationIncomingRequest { + contract_address: Default::default(), + source_code_data: SourceCodeData::YulSingleFile(EMPTY_YUL_CONTRACT.to_owned()), + contract_name: "Empty".to_owned(), + compiler_versions, + optimization_used: true, + optimizer_mode: None, + constructor_arguments: Default::default(), + is_system: false, + force_evmla: false, + } +} + +#[tokio::test] +async fn compiling_yul_with_zksolc() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = supported_compilers.clone().zksolc(); + let compiler = compiler_resolver.resolve_zksolc(&version).await.unwrap(); + let req = test_yul_request(supported_compilers.solc_for_api(BytecodeMarker::EraVm)); + let input = ZkSolc::build_input(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(!output.bytecode.is_empty()); + assert!(output.deployed_bytecode.is_none()); + assert_eq!(output.abi, serde_json::json!([])); +} + +#[tokio::test] +async fn compiling_standalone_yul() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.solc; + let compiler = compiler_resolver.resolve_solc(version).await.unwrap(); + let req = test_yul_request(CompilerVersions::Solc { + compiler_solc_version: version.clone(), + compiler_zksolc_version: None, + }); + let input = Solc::build_input(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(!output.bytecode.is_empty()); + assert_ne!(output.deployed_bytecode.unwrap(), output.bytecode); + assert_eq!(output.abi, serde_json::json!([])); +} + +fn test_vyper_request( + filename: &str, + contract_name: &str, + supported_compilers: TestCompilerVersions, + bytecode_kind: BytecodeMarker, +) -> 
VerificationIncomingRequest { + VerificationIncomingRequest { + contract_address: Address::repeat_byte(1), + source_code_data: SourceCodeData::VyperMultiFile(HashMap::from([( + filename.to_owned(), + COUNTER_VYPER_CONTRACT.to_owned(), + )])), + contract_name: contract_name.to_owned(), + compiler_versions: supported_compilers.vyper_for_api(bytecode_kind), + optimization_used: true, + optimizer_mode: None, + constructor_arguments: Default::default(), + is_system: false, + force_evmla: false, + } +} + +#[test_casing(2, [false, true])] +#[tokio::test] +async fn using_real_zkvyper(specify_contract_file: bool) { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let compiler = compiler_resolver + .resolve_zkvyper(&supported_compilers.clone().zkvyper()) + .await + .unwrap(); + let (filename, contract_name) = if specify_contract_file { + ("contracts/Counter.vy", "contracts/Counter.vy:Counter") + } else { + ("Counter", "Counter") + }; + let req = test_vyper_request( + filename, + contract_name, + supported_compilers, + BytecodeMarker::EraVm, + ); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + validate_bytecode(&output.bytecode).unwrap(); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[test_casing(2, [false, true])] +#[tokio::test] +async fn using_standalone_vyper(specify_contract_file: bool) { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let (filename, contract_name) = if specify_contract_file { + ("contracts/Counter.vy", "contracts/Counter.vy:Counter") + } else { + ("Counter.vy", "Counter") + }; + let req = test_vyper_request( + filename, + contract_name, + supported_compilers, + BytecodeMarker::Evm, + ); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_without_optimization() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = false; + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_with_code_size_optimization() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = true; + req.optimizer_mode = Some("codesize".to_owned()); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_with_bogus_optimization() { + let (compiler_resolver, 
supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = true; + req.optimizer_mode = Some("???".to_owned()); + let input = VyperInput::new(req).unwrap(); + let err = compiler.compile(input).await.unwrap_err(); + + let ContractVerifierError::CompilationError(serde_json::Value::Array(errors)) = err else { + panic!("unexpected error: {err:?}"); + }; + let has_opt_level_error = errors + .iter() + .any(|err| err.as_str().unwrap().contains("optimization level")); + assert!(has_opt_level_error, "{errors:?}"); +} + +#[test_casing(4, Product((BYTECODE_KINDS, Toolchain::ALL)))] +#[tokio::test] +async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker, toolchain: Toolchain) { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let req = match toolchain { + Toolchain::Solidity => VerificationIncomingRequest { + compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind), + ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) + }, + Toolchain::Vyper => VerificationIncomingRequest { + compiler_versions: supported_compilers.clone().vyper_for_api(bytecode_kind), + source_code_data: SourceCodeData::VyperMultiFile(HashMap::from([( + "Counter.vy".to_owned(), + COUNTER_VYPER_CONTRACT.to_owned(), + )])), + ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) + }, }; let address = Address::repeat_byte(1); - let output = match bytecode_kind { - BytecodeMarker::EraVm => { + let output = match (bytecode_kind, toolchain) { + (BytecodeMarker::EraVm, Toolchain::Solidity) => { let compiler = compiler_resolver .resolve_zksolc(&supported_compilers.zksolc()) .await @@ -134,12 +448,26 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { let input = ZkSolc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() } - BytecodeMarker::Evm => { + (BytecodeMarker::Evm, Toolchain::Solidity) => { let solc_version = &supported_compilers.solc; let compiler = compiler_resolver.resolve_solc(solc_version).await.unwrap(); let input = Solc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() } + (_, Toolchain::Vyper) => { + let compiler = match bytecode_kind { + BytecodeMarker::EraVm => compiler_resolver + .resolve_zkvyper(&supported_compilers.zkvyper()) + .await + .unwrap(), + BytecodeMarker::Evm => compiler_resolver + .resolve_vyper(&supported_compilers.vyper) + .await + .unwrap(), + }; + let input = VyperInput::new(req.clone()).unwrap(); + compiler.compile(input).await.unwrap() + } }; let pool = ConnectionPool::test_pool().await; diff --git a/core/lib/types/src/contract_verification_api.rs b/core/lib/types/src/contract_verification_api.rs index 21e511549beb..cca5ae5a83a0 100644 --- a/core/lib/types/src/contract_verification_api.rs +++ b/core/lib/types/src/contract_verification_api.rs @@ -137,6 +137,8 @@ pub struct VerificationIncomingRequest { #[serde(flatten)] pub compiler_versions: CompilerVersions, pub optimization_used: bool, + /// Optimization mode used for the contract. Semantics depends on the compiler used; e.g., for `vyper`, + /// allowed values are `gas` (default), `codesize` or `none`. 
pub optimizer_mode: Option, #[serde(default)] pub constructor_arguments: Bytes, From 407bb3d68e460d14175508ce49b0a60a9d43f675 Mon Sep 17 00:00:00 2001 From: D025 Date: Tue, 12 Nov 2024 12:57:47 +0200 Subject: [PATCH 19/23] ci: revert migrate release workflows to new reusable workflows (#3263) Reverts matter-labs/zksync-era#3198 --- .github/workflows/release-test-stage.yml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 36a2b494c242..18708420dab0 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -61,11 +61,10 @@ jobs: build-push-core-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-core-template.yml + uses: ./.github/workflows/build-core-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} - action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -85,11 +84,10 @@ jobs: build-push-contract-verifier: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-contract-verifier-template.yml + uses: ./.github/workflows/build-contract-verifier-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} - action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -97,13 +95,12 @@ jobs: build-push-prover-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-prover-template.yml + uses: ./.github/workflows/build-prover-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" - action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -111,14 +108,13 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push prover images with avx512 instructions needs: [setup, changed_files] - uses: ./.github/workflows/new-build-witness-generator-template.yml + uses: ./.github/workflows/build-witness-generator-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl " - action: push secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} From 515887539d3eb4e47e8309957cc85a093aeeb47b Mon Sep 17 00:00:00 2001 From: D025 Date: Tue, 12 Nov 2024 13:50:54 +0200 Subject: [PATCH 20/23] ci: revert migrate docker from tag workflow to new templates #2 (#3264) Reverts matter-labs/zksync-era#3250 --- .github/workflows/build-docker-from-tag.yml | 12 ++++-------- .github/workflows/ci.yml | 1 - .github/workflows/new-build-prover-template.yml | 1 - .../new-build-witness-generator-template.yml | 6 ------ 4 files changed, 4 insertions(+), 
16 deletions(-) diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index e2788064efef..206e15bd195f 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -49,7 +49,7 @@ jobs: build-push-core-images: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/new-build-core-template.yml + uses: ./.github/workflows/build-core-template.yml if: contains(github.ref_name, 'core') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -57,7 +57,6 @@ jobs: with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} en_alpha_release: true - action: "push" build-push-tee-prover-images: name: Build and push images @@ -74,25 +73,23 @@ jobs: build-push-contract-verifier: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/new-build-contract-verifier-template.yml + uses: ./.github/workflows/build-contract-verifier-template.yml if: contains(github.ref_name, 'contract_verifier') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} - action: "push" build-push-prover-images: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/new-build-prover-template.yml + uses: ./.github/workflows/build-prover-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" - action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -100,14 +97,13 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push image needs: [ setup ] - uses: ./.github/workflows/new-build-witness-generator-template.yml + uses: ./.github/workflows/build-witness-generator-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" - action: push secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6dc91c6d82d1..2f29fe98f0e6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -178,7 +178,6 @@ jobs: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 action: "build" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" - ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml index 3a44fa807152..046711d679e8 100644 --- a/.github/workflows/new-build-prover-template.yml +++ b/.github/workflows/new-build-prover-template.yml @@ -158,7 +158,6 @@ jobs: SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com SCCACHE_GCS_RW_MODE=READ_WRITE - ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} RUSTC_WRAPPER=sccache file: docker/${{ matrix.components }}/Dockerfile tags: | diff --git 
a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml index 5644215f4352..2f1fc0b2dd86 100644 --- a/.github/workflows/new-build-witness-generator-template.yml +++ b/.github/workflows/new-build-witness-generator-template.yml @@ -9,10 +9,6 @@ on: description: "DOCKERHUB_TOKEN" required: true inputs: - ERA_BELLMAN_CUDA_RELEASE: - description: "ERA_BELLMAN_CUDA_RELEASE" - type: string - required: true image_tag_suffix: description: "Optional suffix to override tag name generation" type: string @@ -131,8 +127,6 @@ jobs: SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com SCCACHE_GCS_RW_MODE=READ_WRITE RUSTC_WRAPPER=sccache - ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} - RUST_FLAGS=${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} file: docker/${{ matrix.components }}/Dockerfile tags: | us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} From 88a89405898653bf9e1a35a1374e000a8d67cf51 Mon Sep 17 00:00:00 2001 From: Ivan Schasny <31857042+ischasny@users.noreply.github.com> Date: Tue, 12 Nov 2024 12:13:21 +0000 Subject: [PATCH 21/23] fix: add deploy timestamp asserter command (#3252) * Add `deploy-timestamp-asserter` command to `zkstack` that works similarly to `deploy-multicall3` and others. The command deploys `TimestampAsserter` contract and updates it's address in `contracts.yaml`; * Update `contracts` submodule to the version that has the required function added. Sister [PR](https://github.com/matter-labs/era-contracts/pull/1058) in the `era-contracts` repo --- contracts | 2 +- .../crates/zkstack/completion/_zkstack.zsh | 43 ++++++++++ .../crates/zkstack/completion/zkstack.fish | 49 +++++++---- .../crates/zkstack/completion/zkstack.sh | 85 ++++++++++++++++++- .../src/commands/chain/deploy_l2_contracts.rs | 32 +++++++ .../crates/zkstack/src/commands/chain/mod.rs | 6 ++ 6 files changed, 195 insertions(+), 22 deletions(-) diff --git a/contracts b/contracts index 9fb1264fce8c..46d75088e7dd 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 9fb1264fce8c0ebeefe8bf1846e89876027161d2 +Subproject commit 46d75088e7ddb534101874c3ec15b877da1cb417 diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index 825fc967e6d7..d9977d574816 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -535,6 +535,23 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help (see more with '\''--help'\'')]' \ && ret=0 ;; +(deploy-timestamp-asserter) +_arguments "${_arguments_options[@]}" : \ +'--verify=[Verify deployed contracts]' \ +'--verifier=[Verifier to use]:VERIFIER:(etherscan sourcify blockscout oklink)' \ +'--verifier-url=[Verifier URL, if using a custom provider]:VERIFIER_URL:_default' \ +'--verifier-api-key=[Verifier API key]:VERIFIER_API_KEY:_default' \ +'*-a+[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'--resume[]' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +&& ret=0 +;; (deploy-upgrader) 
_arguments "${_arguments_options[@]}" : \ '--verify=[Verify deployed contracts]' \ @@ -674,6 +691,10 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(deploy-timestamp-asserter) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (deploy-upgrader) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -2506,6 +2527,10 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(deploy-timestamp-asserter) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (deploy-upgrader) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -2998,6 +3023,7 @@ _zkstack__chain_commands() { 'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ +'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ 'deploy-upgrader:Deploy Default Upgrader' \ 'deploy-paymaster:Deploy paymaster smart contract' \ 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \ @@ -3040,6 +3066,11 @@ _zkstack__chain__deploy-paymaster_commands() { local commands; commands=() _describe -t commands 'zkstack chain deploy-paymaster commands' commands "$@" } +(( $+functions[_zkstack__chain__deploy-timestamp-asserter_commands] )) || +_zkstack__chain__deploy-timestamp-asserter_commands() { + local commands; commands=() + _describe -t commands 'zkstack chain deploy-timestamp-asserter commands' commands "$@" +} (( $+functions[_zkstack__chain__deploy-upgrader_commands] )) || _zkstack__chain__deploy-upgrader_commands() { local commands; commands=() @@ -3101,6 +3132,7 @@ _zkstack__chain__help_commands() { 'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ +'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ 'deploy-upgrader:Deploy Default Upgrader' \ 'deploy-paymaster:Deploy paymaster smart contract' \ 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \ @@ -3143,6 +3175,11 @@ _zkstack__chain__help__deploy-paymaster_commands() { local commands; commands=() _describe -t commands 'zkstack chain help deploy-paymaster commands' commands "$@" } +(( $+functions[_zkstack__chain__help__deploy-timestamp-asserter_commands] )) || +_zkstack__chain__help__deploy-timestamp-asserter_commands() { + local commands; commands=() + _describe -t commands 'zkstack chain help deploy-timestamp-asserter commands' commands "$@" +} (( $+functions[_zkstack__chain__help__deploy-upgrader_commands] )) || _zkstack__chain__help__deploy-upgrader_commands() { local commands; commands=() @@ -4353,6 +4390,7 @@ _zkstack__help__chain_commands() { 'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ +'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ 'deploy-upgrader:Deploy Default Upgrader' \ 'deploy-paymaster:Deploy paymaster smart contract' \ 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \ @@ -4394,6 +4432,11 @@ _zkstack__help__chain__deploy-paymaster_commands() { local commands; commands=() _describe -t commands 'zkstack help chain deploy-paymaster commands' commands "$@" } +(( $+functions[_zkstack__help__chain__deploy-timestamp-asserter_commands] )) || +_zkstack__help__chain__deploy-timestamp-asserter_commands() { + local commands; commands=() + _describe -t commands 'zkstack help chain 
deploy-timestamp-asserter commands' commands "$@" +} (( $+functions[_zkstack__help__chain__deploy-upgrader_commands] )) || _zkstack__help__chain__deploy-upgrader_commands() { local commands; commands=() diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish index 7ad4e6959f90..be6d5d147e78 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.fish +++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish @@ -129,24 +129,25 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "change-default-chain" -d 'Change the default chain' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "setup-observability" -d 'Setup observability for the ecosystem, downloading Grafana dashboards from the era-observability repo' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not 
__fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "genesis" -d 'Run server genesis' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter 
deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "genesis" -d 'Run server genesis' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. 
Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-paymaster" -d 
'Deploy paymaster smart contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-id -d 'Chain ID' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "{no-proofs\t'',gpu\t''}" @@ -265,6 +266,16 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s h -l help -d 'Print help (see more with \'--help\')' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-url -d 'Verifier URL, if using a custom provider' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-api-key -d 'Verifier API key' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete 
-c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s h -l help -d 'Print help (see more with \'--help\')' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier-url -d 'Verifier URL, if using a custom provider' -r @@ -305,6 +316,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' @@ -670,6 +682,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.sh b/zkstack_cli/crates/zkstack/completion/zkstack.sh index ff351ebd79ed..6261941e10f5 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.sh +++ b/zkstack_cli/crates/zkstack/completion/zkstack.sh @@ -78,6 +78,9 @@ _zkstack() { 
zkstack__chain,deploy-paymaster) cmd="zkstack__chain__deploy__paymaster" ;; + zkstack__chain,deploy-timestamp-asserter) + cmd="zkstack__chain__deploy__timestamp__asserter" + ;; zkstack__chain,deploy-upgrader) cmd="zkstack__chain__deploy__upgrader" ;; @@ -138,6 +141,9 @@ _zkstack() { zkstack__chain__help,deploy-paymaster) cmd="zkstack__chain__help__deploy__paymaster" ;; + zkstack__chain__help,deploy-timestamp-asserter) + cmd="zkstack__chain__help__deploy__timestamp__asserter" + ;; zkstack__chain__help,deploy-upgrader) cmd="zkstack__chain__help__deploy__upgrader" ;; @@ -747,6 +753,9 @@ _zkstack() { zkstack__help__chain,deploy-paymaster) cmd="zkstack__help__chain__deploy__paymaster" ;; + zkstack__help__chain,deploy-timestamp-asserter) + cmd="zkstack__help__chain__deploy__timestamp__asserter" + ;; zkstack__help__chain,deploy-upgrader) cmd="zkstack__help__chain__deploy__upgrader" ;; @@ -1048,7 +1057,7 @@ _zkstack() { return 0 ;; zkstack__chain) - opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" + opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1402,6 +1411,48 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__chain__deploy__timestamp__asserter) + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --verify) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; + --verifier) + COMPREPLY=($(compgen -W "etherscan sourcify blockscout oklink" -- "${cur}")) + return 0 + ;; + --verifier-url) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --verifier-api-key) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --additional-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -a) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__chain__deploy__upgrader) opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -1571,7 +1622,7 @@ _zkstack() { return 0 ;; zkstack__chain__help) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" if [[ 
${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1682,6 +1733,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__chain__help__deploy__timestamp__asserter) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__chain__help__deploy__upgrader) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5273,7 +5338,7 @@ _zkstack() { return 0 ;; zkstack__help__chain) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5384,6 +5449,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__chain__deploy__timestamp__asserter) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__chain__deploy__upgrader) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index 091bef86d26d..31cfc7f83977 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -36,6 +36,7 @@ pub enum Deploy2ContractsOption { InitiailizeBridges, ConsensusRegistry, Multicall3, + TimestampAsserter, } pub async fn run( @@ -93,6 +94,16 @@ pub async fn run( ) .await?; } + Deploy2ContractsOption::TimestampAsserter => { + deploy_timestamp_asserter( + shell, + &chain_config, + &ecosystem_config, + &mut contracts, + args, + ) + .await?; + } Deploy2ContractsOption::InitiailizeBridges => { initialize_bridges( shell, @@ -213,6 +224,27 @@ pub async fn deploy_multicall3( .await } +pub async fn deploy_timestamp_asserter( + shell: &Shell, + chain_config: &ChainConfig, + ecosystem_config: &EcosystemConfig, + contracts_config: &mut ContractsConfig, + forge_args: ForgeScriptArgs, +) -> anyhow::Result<()> { + build_and_deploy( + shell, + chain_config, + ecosystem_config, + forge_args, + Some("runDeployTimestampAsserter"), + |shell, out| { + contracts_config + .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?) 
+        },
+    )
+    .await
+}
+
 pub async fn deploy_l2_contracts(
     shell: &Shell,
     chain_config: &ChainConfig,
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs
index c9a47616486d..82b8656154ab 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs
@@ -56,6 +56,9 @@ pub enum ChainCommands {
     /// Deploy L2 multicall3
     #[command(alias = "multicall3")]
     DeployMulticall3(ForgeScriptArgs),
+    /// Deploy L2 TimestampAsserter
+    #[command(alias = "timestamp-asserter")]
+    DeployTimestampAsserter(ForgeScriptArgs),
     /// Deploy Default Upgrader
     #[command(alias = "upgrader")]
     DeployUpgrader(ForgeScriptArgs),
@@ -83,6 +86,9 @@ pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<()
         ChainCommands::DeployMulticall3(args) => {
             deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Multicall3).await
         }
+        ChainCommands::DeployTimestampAsserter(args) => {
+            deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::TimestampAsserter).await
+        }
         ChainCommands::DeployUpgrader(args) => {
             deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Upgrader).await
         }

From e92b8068d71fec032e498793ff97e329c3d073e0 Mon Sep 17 00:00:00 2001
From: Alex Ostrovski
Date: Tue, 12 Nov 2024 15:46:49 +0200
Subject: [PATCH 22/23] feat(zkstack): Introduce `build` and `wait` subcommands for components (#3240)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## What ❔
- Introduces `build` and `wait` subcommands for the server, EN, and contract verifier.
- Uses these commands in CI (integration tests job).

## Why ❔
These commands help prevent races between compilation and test logic.

## Checklist
- [x] PR title corresponds to the body of PR (we generate changelog entries from PRs).
- [x] Documentation comments have been added / updated.
- [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`.
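As a usage sketch, the new subcommands compose as shown below, mirroring the CI steps added in this PR; the chain name, log paths, and the timeout/poll-interval values are illustrative, not defaults:

```bash
# Build the binaries up front so later steps don't race the compiler.
zkstack server build
zkstack external-node build
zkstack contract-verifier build

# Start the server in the background, then block until it is up.
zkstack server run --ignore-prerequisites --chain era &> server.log &
zkstack server wait --ignore-prerequisites --verbose --chain era

# `wait` also accepts a timeout (seconds) and poll interval (milliseconds);
# the values below are examples, not defaults.
zkstack contract-verifier run --chain era &> contract-verifier.log &
zkstack contract-verifier wait --chain era --timeout 120 --poll-interval 500
```

In CI the same pattern is applied to every chain under test (era, validium, custom_token, consensus), and `zkstack consensus wait-for-registry` runs before the attester committee is set.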
ci: Eliminate races between compilation and test logic in integration tests --- .github/workflows/ci-core-reusable.yml | 20 +- .../crates/zkstack/completion/_zkstack.zsh | 375 +++++++++++- .../crates/zkstack/completion/zkstack.fish | 131 ++++- .../crates/zkstack/completion/zkstack.sh | 556 +++++++++++++++++- .../crates/zkstack/src/commands/args/mod.rs | 6 +- .../zkstack/src/commands/args/run_server.rs | 53 +- .../crates/zkstack/src/commands/args/wait.rs | 130 ++++ .../zkstack/src/commands/consensus/mod.rs | 78 ++- .../src/commands/contract_verifier/build.rs | 26 + .../src/commands/contract_verifier/mod.rs | 18 +- .../src/commands/contract_verifier/run.rs | 2 +- .../src/commands/contract_verifier/wait.rs | 27 + .../src/commands/external_node/build.rs | 23 + .../zkstack/src/commands/external_node/mod.rs | 12 +- .../src/commands/external_node/wait.rs | 35 ++ .../crates/zkstack/src/commands/server.rs | 54 +- zkstack_cli/crates/zkstack/src/main.rs | 6 +- zkstack_cli/crates/zkstack/src/messages.rs | 62 +- 18 files changed, 1516 insertions(+), 98 deletions(-) create mode 100644 zkstack_cli/crates/zkstack/src/commands/args/wait.rs create mode 100644 zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs create mode 100644 zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs create mode 100644 zkstack_cli/crates/zkstack/src/commands/external_node/build.rs create mode 100644 zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index da3e2d5abb56..7cada37610c0 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -357,10 +357,17 @@ jobs: run: | ci_run zkstack dev test build + - name: Build tested binaries + run: | + ci_run zkstack server build + ci_run zkstack external-node build + ci_run zkstack contract-verifier build + - name: Initialize Contract verifier run: | ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era ci_run zkstack contract-verifier run --chain era &> ${{ env.SERVER_LOGS_DIR }}/contract-verifier-rollup.log & + ci_run zkstack contract-verifier wait --chain era --verbose - name: Run servers run: | @@ -375,10 +382,14 @@ jobs: --components=api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads,vm_runner_bwip,vm_playground,da_dispatcher,consensus \ &> ${{ env.SERVER_LOGS_DIR }}/consensus.log & - ci_run sleep 5 + ci_run zkstack server wait --ignore-prerequisites --verbose --chain era + ci_run zkstack server wait --ignore-prerequisites --verbose --chain validium + ci_run zkstack server wait --ignore-prerequisites --verbose --chain custom_token + ci_run zkstack server wait --ignore-prerequisites --verbose --chain consensus - - name: Setup attester committee for the consensus chain + - name: Set up attester committee for the consensus chain run: | + ci_run zkstack consensus wait-for-registry --ignore-prerequisites --verbose --chain consensus ci_run zkstack consensus set-attester-committee --chain consensus --from-genesis &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/consensus.log - name: Run integration tests @@ -417,6 +428,11 @@ jobs: ci_run zkstack external-node run --ignore-prerequisites --chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log & ci_run zkstack external-node run --ignore-prerequisites --chain custom_token &> ${{ 
env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log & ci_run zkstack external-node run --ignore-prerequisites --chain consensus --enable-consensus &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/consensus.log & + + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain era + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain validium + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain custom_token + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain consensus - name: Run integration tests en run: | diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index d9977d574816..f0e10b465b6a 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -2008,15 +2008,96 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--genesis[Run server in genesis mode]' \ -'--build[Build server but don'\''t run it]' \ '--uring[Enables uring support for RocksDB]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ '-h[Print help]' \ '--help[Print help]' \ +":: :_zkstack__server_commands" \ +"*::: :->server" \ +&& ret=0 + + case $state in + (server) + words=($line[1] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:zkstack-server-command-$line[1]:" + case $line[1] in + (build) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +'*--components=[Components of server to run]:COMPONENTS:_default' \ +'*-a+[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'*--additional-args=[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'--genesis[Run server in genesis mode]' \ +'--uring[Enables uring support for RocksDB]' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(wait) +_arguments "${_arguments_options[@]}" : \ +'-t+[Wait timeout in seconds]:SECONDS:_default' \ +'--timeout=[Wait timeout in seconds]:SECONDS:_default' \ +'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ && ret=0 ;; +(help) +_arguments "${_arguments_options[@]}" : \ +":: :_zkstack__server__help_commands" \ +"*::: :->help" \ +&& ret=0 + + case $state in + (help) + words=($line[1] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:zkstack-server-help-command-$line[1]:" + case $line[1] in + (build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(wait) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(help) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; + esac + ;; +esac +;; + esac + ;; +esac +;; (external-node) _arguments 
"${_arguments_options[@]}" : \ '--chain=[Chain to use]:CHAIN:_default' \ @@ -2060,6 +2141,16 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help]' \ && ret=0 ;; +(build) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; (run) _arguments "${_arguments_options[@]}" : \ '*--components=[Components of server to run]:COMPONENTS:_default' \ @@ -2075,6 +2166,19 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help]' \ && ret=0 ;; +(wait) +_arguments "${_arguments_options[@]}" : \ +'-t+[Wait timeout in seconds]:SECONDS:_default' \ +'--timeout=[Wait timeout in seconds]:SECONDS:_default' \ +'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; (help) _arguments "${_arguments_options[@]}" : \ ":: :_zkstack__external-node__help_commands" \ @@ -2095,10 +2199,18 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (run) _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(wait) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (help) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -2141,8 +2253,31 @@ _arguments "${_arguments_options[@]}" : \ (( CURRENT += 1 )) curcontext="${curcontext%:*:*}:zkstack-contract-verifier-command-$line[1]:" case $line[1] in - (run) + (build) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(wait) _arguments "${_arguments_options[@]}" : \ +'-t+[Wait timeout in seconds]:SECONDS:_default' \ +'--timeout=[Wait timeout in seconds]:SECONDS:_default' \ +'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ @@ -2179,7 +2314,15 @@ _arguments "${_arguments_options[@]}" : \ (( CURRENT += 1 )) curcontext="${curcontext%:*:*}:zkstack-contract-verifier-help-command-$line[1]:" case $line[1] in - (run) + (build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(wait) _arguments "${_arguments_options[@]}" : \ && ret=0 ;; @@ -2333,6 +2476,19 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help]' \ && ret=0 ;; +(wait-for-registry) +_arguments "${_arguments_options[@]}" : \ +'-t+[Wait timeout in seconds]:SECONDS:_default' \ +'--timeout=[Wait timeout in seconds]:SECONDS:_default' \ +'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; (help) _arguments 
"${_arguments_options[@]}" : \ ":: :_zkstack__consensus__help_commands" \ @@ -2353,6 +2509,10 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(wait-for-registry) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (help) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -2849,8 +3009,32 @@ esac ;; (server) _arguments "${_arguments_options[@]}" : \ +":: :_zkstack__help__server_commands" \ +"*::: :->server" \ +&& ret=0 + + case $state in + (server) + words=($line[1] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:zkstack-help-server-command-$line[1]:" + case $line[1] in + (build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(wait) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; + esac + ;; +esac +;; (external-node) _arguments "${_arguments_options[@]}" : \ ":: :_zkstack__help__external-node_commands" \ @@ -2871,9 +3055,17 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (run) _arguments "${_arguments_options[@]}" : \ && ret=0 +;; +(wait) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 ;; esac ;; @@ -2895,7 +3087,15 @@ _arguments "${_arguments_options[@]}" : \ (( CURRENT += 1 )) curcontext="${curcontext%:*:*}:zkstack-help-contract-verifier-command-$line[1]:" case $line[1] in - (run) + (build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(wait) _arguments "${_arguments_options[@]}" : \ && ret=0 ;; @@ -2958,6 +3158,10 @@ _arguments "${_arguments_options[@]}" : \ (get-attester-committee) _arguments "${_arguments_options[@]}" : \ && ret=0 +;; +(wait-for-registry) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 ;; esac ;; @@ -3286,6 +3490,7 @@ _zkstack__consensus_commands() { local commands; commands=( 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \ 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \ +'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack consensus commands' commands "$@" @@ -3300,6 +3505,7 @@ _zkstack__consensus__help_commands() { local commands; commands=( 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \ 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \ +'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack consensus help commands' commands "$@" @@ -3319,11 +3525,21 @@ _zkstack__consensus__help__set-attester-committee_commands() { local commands; commands=() _describe -t commands 'zkstack consensus help set-attester-committee commands' commands "$@" } +(( $+functions[_zkstack__consensus__help__wait-for-registry_commands] )) || +_zkstack__consensus__help__wait-for-registry_commands() { + local commands; commands=() + _describe -t commands 'zkstack consensus help wait-for-registry commands' commands "$@" +} (( $+functions[_zkstack__consensus__set-attester-committee_commands] )) || 
_zkstack__consensus__set-attester-committee_commands() { local commands; commands=() _describe -t commands 'zkstack consensus set-attester-committee commands' commands "$@" } +(( $+functions[_zkstack__consensus__wait-for-registry_commands] )) || +_zkstack__consensus__wait-for-registry_commands() { + local commands; commands=() + _describe -t commands 'zkstack consensus wait-for-registry commands' commands "$@" +} (( $+functions[_zkstack__containers_commands] )) || _zkstack__containers_commands() { local commands; commands=() @@ -3332,21 +3548,35 @@ _zkstack__containers_commands() { (( $+functions[_zkstack__contract-verifier_commands] )) || _zkstack__contract-verifier_commands() { local commands; commands=( +'build:Build contract verifier binary' \ 'run:Run contract verifier' \ +'wait:Wait for contract verifier to start' \ 'init:Download required binaries for contract verifier' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack contract-verifier commands' commands "$@" } +(( $+functions[_zkstack__contract-verifier__build_commands] )) || +_zkstack__contract-verifier__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack contract-verifier build commands' commands "$@" +} (( $+functions[_zkstack__contract-verifier__help_commands] )) || _zkstack__contract-verifier__help_commands() { local commands; commands=( +'build:Build contract verifier binary' \ 'run:Run contract verifier' \ +'wait:Wait for contract verifier to start' \ 'init:Download required binaries for contract verifier' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack contract-verifier help commands' commands "$@" } +(( $+functions[_zkstack__contract-verifier__help__build_commands] )) || +_zkstack__contract-verifier__help__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack contract-verifier help build commands' commands "$@" +} (( $+functions[_zkstack__contract-verifier__help__help_commands] )) || _zkstack__contract-verifier__help__help_commands() { local commands; commands=() @@ -3362,6 +3592,11 @@ _zkstack__contract-verifier__help__run_commands() { local commands; commands=() _describe -t commands 'zkstack contract-verifier help run commands' commands "$@" } +(( $+functions[_zkstack__contract-verifier__help__wait_commands] )) || +_zkstack__contract-verifier__help__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack contract-verifier help wait commands' commands "$@" +} (( $+functions[_zkstack__contract-verifier__init_commands] )) || _zkstack__contract-verifier__init_commands() { local commands; commands=() @@ -3372,6 +3607,11 @@ _zkstack__contract-verifier__run_commands() { local commands; commands=() _describe -t commands 'zkstack contract-verifier run commands' commands "$@" } +(( $+functions[_zkstack__contract-verifier__wait_commands] )) || +_zkstack__contract-verifier__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack contract-verifier wait commands' commands "$@" +} (( $+functions[_zkstack__dev_commands] )) || _zkstack__dev_commands() { local commands; commands=( @@ -4301,11 +4541,18 @@ _zkstack__external-node_commands() { local commands; commands=( 'configs:Prepare configs for EN' \ 'init:Init databases' \ +'build:Build external node' \ 'run:Run external node' \ +'wait:Wait for external node to start' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack 
external-node commands' commands "$@" } +(( $+functions[_zkstack__external-node__build_commands] )) || +_zkstack__external-node__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack external-node build commands' commands "$@" +} (( $+functions[_zkstack__external-node__configs_commands] )) || _zkstack__external-node__configs_commands() { local commands; commands=() @@ -4316,11 +4563,18 @@ _zkstack__external-node__help_commands() { local commands; commands=( 'configs:Prepare configs for EN' \ 'init:Init databases' \ +'build:Build external node' \ 'run:Run external node' \ +'wait:Wait for external node to start' \ 'help:Print this message or the help of the given subcommand(s)' \ ) _describe -t commands 'zkstack external-node help commands' commands "$@" } +(( $+functions[_zkstack__external-node__help__build_commands] )) || +_zkstack__external-node__help__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack external-node help build commands' commands "$@" +} (( $+functions[_zkstack__external-node__help__configs_commands] )) || _zkstack__external-node__help__configs_commands() { local commands; commands=() @@ -4341,6 +4595,11 @@ _zkstack__external-node__help__run_commands() { local commands; commands=() _describe -t commands 'zkstack external-node help run commands' commands "$@" } +(( $+functions[_zkstack__external-node__help__wait_commands] )) || +_zkstack__external-node__help__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack external-node help wait commands' commands "$@" +} (( $+functions[_zkstack__external-node__init_commands] )) || _zkstack__external-node__init_commands() { local commands; commands=() @@ -4351,6 +4610,11 @@ _zkstack__external-node__run_commands() { local commands; commands=() _describe -t commands 'zkstack external-node run commands' commands "$@" } +(( $+functions[_zkstack__external-node__wait_commands] )) || +_zkstack__external-node__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack external-node wait commands' commands "$@" +} (( $+functions[_zkstack__help_commands] )) || _zkstack__help_commands() { local commands; commands=( @@ -4492,6 +4756,7 @@ _zkstack__help__consensus_commands() { local commands; commands=( 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \ 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \ +'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \ ) _describe -t commands 'zkstack help consensus commands' commands "$@" } @@ -4505,6 +4770,11 @@ _zkstack__help__consensus__set-attester-committee_commands() { local commands; commands=() _describe -t commands 'zkstack help consensus set-attester-committee commands' commands "$@" } +(( $+functions[_zkstack__help__consensus__wait-for-registry_commands] )) || +_zkstack__help__consensus__wait-for-registry_commands() { + local commands; commands=() + _describe -t commands 'zkstack help consensus wait-for-registry commands' commands "$@" +} (( $+functions[_zkstack__help__containers_commands] )) || _zkstack__help__containers_commands() { local commands; commands=() @@ -4513,11 +4783,18 @@ _zkstack__help__containers_commands() { (( $+functions[_zkstack__help__contract-verifier_commands] )) || _zkstack__help__contract-verifier_commands() { local commands; commands=( +'build:Build contract verifier binary' \ 'run:Run contract 
verifier' \ +'wait:Wait for contract verifier to start' \ 'init:Download required binaries for contract verifier' \ ) _describe -t commands 'zkstack help contract-verifier commands' commands "$@" } +(( $+functions[_zkstack__help__contract-verifier__build_commands] )) || +_zkstack__help__contract-verifier__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack help contract-verifier build commands' commands "$@" +} (( $+functions[_zkstack__help__contract-verifier__init_commands] )) || _zkstack__help__contract-verifier__init_commands() { local commands; commands=() @@ -4528,6 +4805,11 @@ _zkstack__help__contract-verifier__run_commands() { local commands; commands=() _describe -t commands 'zkstack help contract-verifier run commands' commands "$@" } +(( $+functions[_zkstack__help__contract-verifier__wait_commands] )) || +_zkstack__help__contract-verifier__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack help contract-verifier wait commands' commands "$@" +} (( $+functions[_zkstack__help__dev_commands] )) || _zkstack__help__dev_commands() { local commands; commands=( @@ -4852,10 +5134,17 @@ _zkstack__help__external-node_commands() { local commands; commands=( 'configs:Prepare configs for EN' \ 'init:Init databases' \ +'build:Build external node' \ 'run:Run external node' \ +'wait:Wait for external node to start' \ ) _describe -t commands 'zkstack help external-node commands' commands "$@" } +(( $+functions[_zkstack__help__external-node__build_commands] )) || +_zkstack__help__external-node__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack help external-node build commands' commands "$@" +} (( $+functions[_zkstack__help__external-node__configs_commands] )) || _zkstack__help__external-node__configs_commands() { local commands; commands=() @@ -4871,6 +5160,11 @@ _zkstack__help__external-node__run_commands() { local commands; commands=() _describe -t commands 'zkstack help external-node run commands' commands "$@" } +(( $+functions[_zkstack__help__external-node__wait_commands] )) || +_zkstack__help__external-node__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack help external-node wait commands' commands "$@" +} (( $+functions[_zkstack__help__help_commands] )) || _zkstack__help__help_commands() { local commands; commands=() @@ -4924,9 +5218,28 @@ _zkstack__help__prover__setup-keys_commands() { } (( $+functions[_zkstack__help__server_commands] )) || _zkstack__help__server_commands() { - local commands; commands=() + local commands; commands=( +'build:Builds server' \ +'run:Runs server' \ +'wait:Waits for server to start' \ + ) _describe -t commands 'zkstack help server commands' commands "$@" } +(( $+functions[_zkstack__help__server__build_commands] )) || +_zkstack__help__server__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack help server build commands' commands "$@" +} +(( $+functions[_zkstack__help__server__run_commands] )) || +_zkstack__help__server__run_commands() { + local commands; commands=() + _describe -t commands 'zkstack help server run commands' commands "$@" +} +(( $+functions[_zkstack__help__server__wait_commands] )) || +_zkstack__help__server__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack help server wait commands' commands "$@" +} (( $+functions[_zkstack__help__update_commands] )) || _zkstack__help__update_commands() { local commands; commands=() @@ -5023,9 +5336,59 @@ 
_zkstack__prover__setup-keys_commands() { } (( $+functions[_zkstack__server_commands] )) || _zkstack__server_commands() { - local commands; commands=() + local commands; commands=( +'build:Builds server' \ +'run:Runs server' \ +'wait:Waits for server to start' \ +'help:Print this message or the help of the given subcommand(s)' \ + ) _describe -t commands 'zkstack server commands' commands "$@" } +(( $+functions[_zkstack__server__build_commands] )) || +_zkstack__server__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack server build commands' commands "$@" +} +(( $+functions[_zkstack__server__help_commands] )) || +_zkstack__server__help_commands() { + local commands; commands=( +'build:Builds server' \ +'run:Runs server' \ +'wait:Waits for server to start' \ +'help:Print this message or the help of the given subcommand(s)' \ + ) + _describe -t commands 'zkstack server help commands' commands "$@" +} +(( $+functions[_zkstack__server__help__build_commands] )) || +_zkstack__server__help__build_commands() { + local commands; commands=() + _describe -t commands 'zkstack server help build commands' commands "$@" +} +(( $+functions[_zkstack__server__help__help_commands] )) || +_zkstack__server__help__help_commands() { + local commands; commands=() + _describe -t commands 'zkstack server help help commands' commands "$@" +} +(( $+functions[_zkstack__server__help__run_commands] )) || +_zkstack__server__help__run_commands() { + local commands; commands=() + _describe -t commands 'zkstack server help run commands' commands "$@" +} +(( $+functions[_zkstack__server__help__wait_commands] )) || +_zkstack__server__help__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack server help wait commands' commands "$@" +} +(( $+functions[_zkstack__server__run_commands] )) || +_zkstack__server__run_commands() { + local commands; commands=() + _describe -t commands 'zkstack server run commands' commands "$@" +} +(( $+functions[_zkstack__server__wait_commands] )) || +_zkstack__server__wait_commands() { + local commands; commands=() + _describe -t commands 'zkstack server wait commands' commands "$@" +} (( $+functions[_zkstack__update_commands] )) || _zkstack__update_commands() { local commands; commands=() diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish index be6d5d147e78..dacc27d88089 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.fish +++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish @@ -525,23 +525,50 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "init-bellman-cuda" -d 'Initialize bellman-cuda' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "compressor-keys" -d 'Download compressor keys' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l components -d 'Components of server to run' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l chain -d 'Chain to use' -r -complete -c zkstack -n 
"__fish_zkstack_using_subcommand server" -l genesis -d 'Run server in genesis mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l build -d 'Build server but don\'t run it' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l uring -d 'Enables uring support for RocksDB' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "configs" -d 'Prepare configs for EN' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "init" -d 'Init databases' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "run" -d 'Run external node' -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l components -d 'Components of server to run' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l genesis -d 'Run server in genesis mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l uring -d 'Enables uring support for RocksDB' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "build" -d 'Builds server' +complete -c zkstack -n 
"__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "run" -d 'Runs server' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "wait" -d 'Waits for server to start' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l components -d 'Components of server to run' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l genesis -d 'Run server in genesis mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l uring -d 'Enables uring support for RocksDB' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "build" -d 'Builds server' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "run" -d 'Runs server' +complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Waits for server to start' +complete -c 
zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "configs" -d 'Prepare configs for EN' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "init" -d 'Init databases' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "build" -d 'Build external node' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "run" -d 'Run external node' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "wait" -d 'Wait for external node to start' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l db-url -r complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l db-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l l1-rpc-url -r @@ -554,6 +581,10 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help' complete -c 
zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l components -d 'Components of server to run' -r complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l enable-consensus -d 'Enable consensus' -r -f -a "{true\t'',false\t''}" complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r @@ -562,26 +593,46 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "configs" -d 'Prepare configs for EN' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "init" -d 'Init databases' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "build" -d 'Build external node' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run external node' +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Wait for external node to start' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s o -l observability -d 'Enable Grafana' -r -f -a "{true\t'',false\t''}" complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand 
contract-verifier; and not __fish_seen_subcommand_from run init help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "run" -d 'Run contract verifier' -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "init" -d 'Download required binaries for contract verifier' -complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "build" -d 'Build contract verifier binary' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "run" -d 'Run contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "wait" -d 'Wait for contract verifier to start' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "init" -d 'Download required binaries for contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and 
__fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l zksolc-version -d 'Version of zksolc to install' -r complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l zkvyper-version -d 'Version of zkvyper to install' -r complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l solc-version -d 'Version of solc to install' -r @@ -592,7 +643,9 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and _ complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "build" -d 'Build contract verifier binary' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Wait for contract verifier to start' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "init" -d 'Download required binaries for contract verifier' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand portal" -l chain -d 'Chain to use' -r @@ -623,13 +676,14 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand 
explorer; and __fish_see complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "run-backend" -d 'Start explorer backend services (api, data_fetcher, worker) for a given chain. Uses default chain, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run explorer app' complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "get-attester-committee" -d 
'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l from-file -d 'Sets the attester committee in the consensus registry contract to the committee in the yaml file. File format is definied in `commands/consensus/proto/mod.proto`' -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l from-genesis -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' @@ -640,8 +694,15 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' complete -c zkstack 
-n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand update" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand update" -s c -l only-config -d 'Update only the config files' @@ -703,13 +764,21 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "run" -d 'Run prover' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "init-bellman-cuda" -d 'Initialize bellman-cuda' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "compressor-keys" -d 'Download compressor keys' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "build" -d 'Builds server' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "run" -d 'Runs server' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "wait" -d 'Waits for server to start' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "configs" -d 'Prepare configs for EN' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "init" -d 'Init databases' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "build" -d 'Build external node' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "run" -d 'Run external node' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "wait" -d 'Wait for external node to start' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "build" -d 'Build contract verifier binary' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "run" -d 'Run contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "wait" -d 'Wait for contract verifier to start' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "init" -d 'Download required binaries for contract verifier' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "init" -d 'Initialize explorer (create database to store explorer data and generate docker compose file with explorer services). Runs for all chains, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "run-backend" -d 'Start explorer backend services (api, data_fetcher, worker) for a given chain. 
Uses default chain, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "run" -d 'Run explorer app' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.sh b/zkstack_cli/crates/zkstack/completion/zkstack.sh index 6261941e10f5..0cf89ed4ef3f 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.sh +++ b/zkstack_cli/crates/zkstack/completion/zkstack.sh @@ -195,6 +195,9 @@ _zkstack() { zkstack__consensus,set-attester-committee) cmd="zkstack__consensus__set__attester__committee" ;; + zkstack__consensus,wait-for-registry) + cmd="zkstack__consensus__wait__for__registry" + ;; zkstack__consensus__help,get-attester-committee) cmd="zkstack__consensus__help__get__attester__committee" ;; @@ -204,6 +207,12 @@ _zkstack() { zkstack__consensus__help,set-attester-committee) cmd="zkstack__consensus__help__set__attester__committee" ;; + zkstack__consensus__help,wait-for-registry) + cmd="zkstack__consensus__help__wait__for__registry" + ;; + zkstack__contract__verifier,build) + cmd="zkstack__contract__verifier__build" + ;; zkstack__contract__verifier,help) cmd="zkstack__contract__verifier__help" ;; @@ -213,6 +222,12 @@ _zkstack() { zkstack__contract__verifier,run) cmd="zkstack__contract__verifier__run" ;; + zkstack__contract__verifier,wait) + cmd="zkstack__contract__verifier__wait" + ;; + zkstack__contract__verifier__help,build) + cmd="zkstack__contract__verifier__help__build" + ;; zkstack__contract__verifier__help,help) cmd="zkstack__contract__verifier__help__help" ;; @@ -222,6 +237,9 @@ _zkstack() { zkstack__contract__verifier__help,run) cmd="zkstack__contract__verifier__help__run" ;; + zkstack__contract__verifier__help,wait) + cmd="zkstack__contract__verifier__help__wait" + ;; zkstack__dev,clean) cmd="zkstack__dev__clean" ;; @@ -663,6 +681,9 @@ _zkstack() { zkstack__explorer__help,run-backend) cmd="zkstack__explorer__help__run__backend" ;; + zkstack__external__node,build) + cmd="zkstack__external__node__build" + ;; zkstack__external__node,configs) cmd="zkstack__external__node__configs" ;; @@ -675,6 +696,12 @@ _zkstack() { zkstack__external__node,run) cmd="zkstack__external__node__run" ;; + zkstack__external__node,wait) + cmd="zkstack__external__node__wait" + ;; + zkstack__external__node__help,build) + cmd="zkstack__external__node__help__build" + ;; zkstack__external__node__help,configs) cmd="zkstack__external__node__help__configs" ;; @@ -687,6 +714,9 @@ _zkstack() { zkstack__external__node__help,run) cmd="zkstack__external__node__help__run" ;; + zkstack__external__node__help,wait) + cmd="zkstack__external__node__help__wait" + ;; zkstack__help,autocomplete) cmd="zkstack__help__autocomplete" ;; @@ -789,12 +819,21 @@ _zkstack() { zkstack__help__consensus,set-attester-committee) cmd="zkstack__help__consensus__set__attester__committee" ;; + 
zkstack__help__consensus,wait-for-registry) + cmd="zkstack__help__consensus__wait__for__registry" + ;; + zkstack__help__contract__verifier,build) + cmd="zkstack__help__contract__verifier__build" + ;; zkstack__help__contract__verifier,init) cmd="zkstack__help__contract__verifier__init" ;; zkstack__help__contract__verifier,run) cmd="zkstack__help__contract__verifier__run" ;; + zkstack__help__contract__verifier,wait) + cmd="zkstack__help__contract__verifier__wait" + ;; zkstack__help__dev,clean) cmd="zkstack__help__dev__clean" ;; @@ -942,6 +981,9 @@ _zkstack() { zkstack__help__explorer,run-backend) cmd="zkstack__help__explorer__run__backend" ;; + zkstack__help__external__node,build) + cmd="zkstack__help__external__node__build" + ;; zkstack__help__external__node,configs) cmd="zkstack__help__external__node__configs" ;; @@ -951,6 +993,9 @@ _zkstack() { zkstack__help__external__node,run) cmd="zkstack__help__external__node__run" ;; + zkstack__help__external__node,wait) + cmd="zkstack__help__external__node__wait" + ;; zkstack__help__prover,compressor-keys) cmd="zkstack__help__prover__compressor__keys" ;; @@ -966,6 +1011,15 @@ _zkstack() { zkstack__help__prover,setup-keys) cmd="zkstack__help__prover__setup__keys" ;; + zkstack__help__server,build) + cmd="zkstack__help__server__build" + ;; + zkstack__help__server,run) + cmd="zkstack__help__server__run" + ;; + zkstack__help__server,wait) + cmd="zkstack__help__server__wait" + ;; zkstack__prover,compressor-keys) cmd="zkstack__prover__compressor__keys" ;; @@ -1002,6 +1056,30 @@ _zkstack() { zkstack__prover__help,setup-keys) cmd="zkstack__prover__help__setup__keys" ;; + zkstack__server,build) + cmd="zkstack__server__build" + ;; + zkstack__server,help) + cmd="zkstack__server__help" + ;; + zkstack__server,run) + cmd="zkstack__server__run" + ;; + zkstack__server,wait) + cmd="zkstack__server__wait" + ;; + zkstack__server__help,build) + cmd="zkstack__server__help__build" + ;; + zkstack__server__help,help) + cmd="zkstack__server__help__help" + ;; + zkstack__server__help,run) + cmd="zkstack__server__help__run" + ;; + zkstack__server__help,wait) + cmd="zkstack__server__help__wait" + ;; *) ;; esac @@ -2144,7 +2222,7 @@ _zkstack() { return 0 ;; zkstack__consensus) - opts="-v -h --verbose --chain --ignore-prerequisites --help set-attester-committee get-attester-committee help" + opts="-v -h --verbose --chain --ignore-prerequisites --help set-attester-committee get-attester-committee wait-for-registry help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2180,7 +2258,7 @@ _zkstack() { return 0 ;; zkstack__consensus__help) - opts="set-attester-committee get-attester-committee help" + opts="set-attester-committee get-attester-committee wait-for-registry help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2235,6 +2313,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__consensus__help__wait__for__registry) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__consensus__set__attester__committee) opts="-v -h --from-genesis --from-file --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -2257,6 +2349,36 @@ _zkstack() { COMPREPLY=( $(compgen -W 
"${opts}" -- "${cur}") ) return 0 ;; + zkstack__consensus__wait__for__registry) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__containers) opts="-o -v -h --observability --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then @@ -2284,7 +2406,7 @@ _zkstack() { return 0 ;; zkstack__contract__verifier) - opts="-v -h --verbose --chain --ignore-prerequisites --help run init help" + opts="-v -h --verbose --chain --ignore-prerequisites --help build run wait init help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2301,8 +2423,26 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__help) - opts="run init help" + opts="build run wait init help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2315,6 +2455,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__help__help) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -2357,6 +2511,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__init) opts="-v -h --zksolc-version --zkvyper-version --solc-version --era-vm-solc-version --vyper-version --only --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -2413,6 +2581,36 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen 
-f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__dev) opts="-v -h --verbose --chain --ignore-prerequisites --help database test clean snapshot lint fmt prover contracts config-writer send-transactions status generate-genesis help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then @@ -5140,7 +5338,7 @@ _zkstack() { return 0 ;; zkstack__external__node) - opts="-v -h --verbose --chain --ignore-prerequisites --help configs init run help" + opts="-v -h --verbose --chain --ignore-prerequisites --help configs init build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5157,6 +5355,24 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__configs) opts="-u -v -h --db-url --db-name --l1-rpc-url --use-default --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5188,7 +5404,7 @@ _zkstack() { return 0 ;; zkstack__external__node__help) - opts="configs init run help" + opts="configs init build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5201,6 +5417,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__help__configs) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5257,6 +5487,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__init) opts="-v -h --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5309,6 +5553,36 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help) opts="autocomplete ecosystem 
chain dev prover server external-node containers contract-verifier portal explorer consensus update markdown help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then @@ -5590,7 +5864,7 @@ _zkstack() { return 0 ;; zkstack__help__consensus) - opts="set-attester-committee get-attester-committee" + opts="set-attester-committee get-attester-committee wait-for-registry" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5631,6 +5905,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__consensus__wait__for__registry) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__containers) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5646,7 +5934,7 @@ _zkstack() { return 0 ;; zkstack__help__contract__verifier) - opts="run init" + opts="build run wait init" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5659,6 +5947,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__contract__verifier__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__contract__verifier__init) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5687,6 +5989,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__contract__verifier__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__dev) opts="database test clean snapshot lint fmt prover contracts config-writer send-transactions status generate-genesis" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -6416,7 +6732,7 @@ _zkstack() { return 0 ;; zkstack__help__external__node) - opts="configs init run" + opts="configs init build run wait" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6429,6 +6745,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__external__node__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__external__node__configs) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -6471,6 +6801,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__external__node__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__help) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -6598,7 +6942,7 @@ _zkstack() { return 0 ;; 
zkstack__help__server) - opts="" + opts="build run wait" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6611,6 +6955,48 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__server__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__help__server__run) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__help__server__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__update) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -7028,7 +7414,7 @@ _zkstack() { return 0 ;; zkstack__server) - opts="-a -v -h --components --genesis --additional-args --build --uring --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --components --genesis --additional-args --uring --verbose --chain --ignore-prerequisites --help build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -7057,6 +7443,154 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__server__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help) + opts="build run wait help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__help) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__run) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + 
zkstack__server__run) + opts="-a -v -h --components --genesis --additional-args --uring --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --components) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --additional-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -a) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__update) opts="-c -v -h --only-config --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then diff --git a/zkstack_cli/crates/zkstack/src/commands/args/mod.rs b/zkstack_cli/crates/zkstack/src/commands/args/mod.rs index 5fa83aadf51f..477f3a6ae9af 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/args/mod.rs @@ -1,9 +1,7 @@ -pub use autocomplete::*; -pub use containers::*; -pub use run_server::*; -pub use update::*; +pub use self::{autocomplete::*, containers::*, run_server::*, update::*, wait::*}; mod autocomplete; mod containers; mod run_server; mod update; +mod wait; diff --git a/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs b/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs index d090c0de03f9..40344c90ad05 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs @@ -1,22 +1,53 @@ -use clap::Parser; +use clap::{Parser, Subcommand}; use serde::{Deserialize, Serialize}; -use crate::messages::{ - MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_BUILD_HELP, MSG_SERVER_COMPONENTS_HELP, - MSG_SERVER_GENESIS_HELP, MSG_SERVER_URING_HELP, +use crate::{ + commands::args::WaitArgs, + messages::{ + MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_COMPONENTS_HELP, MSG_SERVER_GENESIS_HELP, + MSG_SERVER_URING_HELP, + }, }; +#[derive(Debug, Parser)] +#[command(args_conflicts_with_subcommands = true, flatten_help = true)] +pub struct ServerArgs { + #[command(subcommand)] + command: Option, + #[command(flatten)] + run: RunServerArgs, +} + +#[derive(Debug, Subcommand)] +pub enum ServerCommand { + /// Builds server + Build, + /// Runs server + Run(RunServerArgs), + /// Waits for server to start + Wait(WaitArgs), +} + +impl From for ServerCommand { + fn from(args: ServerArgs) -> Self { + args.command.unwrap_or(ServerCommand::Run(args.run)) + } +} + #[derive(Debug, Serialize, Deserialize, Parser)] pub struct RunServerArgs { - #[clap(long, help = MSG_SERVER_COMPONENTS_HELP)] + #[arg(long, help = MSG_SERVER_COMPONENTS_HELP)] pub components: Option>, - #[clap(long, help = MSG_SERVER_GENESIS_HELP)] + #[arg(long, help = MSG_SERVER_GENESIS_HELP)] pub genesis: bool, - 
#[clap(long, short)] - #[arg(trailing_var_arg = true, allow_hyphen_values = true, hide = false, help = MSG_SERVER_ADDITIONAL_ARGS_HELP)] + #[arg( + long, short, + trailing_var_arg = true, + allow_hyphen_values = true, + hide = false, + help = MSG_SERVER_ADDITIONAL_ARGS_HELP + )] additional_args: Vec, - #[clap(long, help = MSG_SERVER_BUILD_HELP)] - pub build: bool, - #[clap(help=MSG_SERVER_URING_HELP, long, default_missing_value = "true")] + #[clap(help = MSG_SERVER_URING_HELP, long, default_missing_value = "true")] pub uring: bool, } diff --git a/zkstack_cli/crates/zkstack/src/commands/args/wait.rs b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs new file mode 100644 index 000000000000..a3a7e32ae8b4 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs @@ -0,0 +1,130 @@ +use std::{fmt, future::Future, time::Duration}; + +use anyhow::Context as _; +use clap::Parser; +use common::logger; +use reqwest::StatusCode; +use serde::{Deserialize, Serialize}; +use tokio::time::MissedTickBehavior; + +use crate::messages::{ + msg_wait_connect_err, msg_wait_non_successful_response, msg_wait_not_healthy, + msg_wait_starting_polling, msg_wait_timeout, MSG_WAIT_POLL_INTERVAL_HELP, + MSG_WAIT_TIMEOUT_HELP, +}; + +#[derive(Debug, Clone, Copy)] +enum PolledComponent { + Prometheus, + HealthCheck, +} + +impl fmt::Display for PolledComponent { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(match self { + Self::Prometheus => "Prometheus", + Self::HealthCheck => "health check", + }) + } +} + +#[derive(Debug, Parser, Serialize, Deserialize)] +pub struct WaitArgs { + #[arg(long, short = 't', value_name = "SECONDS", help = MSG_WAIT_TIMEOUT_HELP)] + timeout: Option, + #[arg(long, value_name = "MILLIS", help = MSG_WAIT_POLL_INTERVAL_HELP, default_value_t = 100)] + poll_interval: u64, +} + +impl WaitArgs { + pub fn poll_interval(&self) -> Duration { + Duration::from_millis(self.poll_interval) + } + + pub async fn poll_prometheus(&self, port: u16, verbose: bool) -> anyhow::Result<()> { + let component = PolledComponent::Prometheus; + let url = format!("http://127.0.0.1:{port}/metrics"); + self.poll_with_timeout(component, self.poll_inner(component, &url, verbose)) + .await + } + + pub async fn poll_health_check(&self, port: u16, verbose: bool) -> anyhow::Result<()> { + let component = PolledComponent::HealthCheck; + let url = format!("http://127.0.0.1:{port}/health"); + self.poll_with_timeout(component, self.poll_inner(component, &url, verbose)) + .await + } + + pub async fn poll_with_timeout( + &self, + component: impl fmt::Display, + action: impl Future>, + ) -> anyhow::Result<()> { + match self.timeout { + None => action.await, + Some(timeout) => tokio::time::timeout(Duration::from_secs(timeout), action) + .await + .map_err(|_| anyhow::Error::msg(msg_wait_timeout(&component)))?, + } + } + + async fn poll_inner( + &self, + component: PolledComponent, + url: &str, + verbose: bool, + ) -> anyhow::Result<()> { + let poll_interval = Duration::from_millis(self.poll_interval); + let mut interval = tokio::time::interval(poll_interval); + interval.set_missed_tick_behavior(MissedTickBehavior::Skip); + + if verbose { + logger::debug(msg_wait_starting_polling(&component, url, poll_interval)); + } + + let client = reqwest::Client::builder() + .connect_timeout(poll_interval) + .build() + .context("failed to build reqwest::Client")?; + + loop { + interval.tick().await; + + let response = match client.get(url).send().await { + Ok(response) => response, + Err(err) 
if err.is_connect() || err.is_timeout() => { + continue; + } + Err(err) => { + return Err( + anyhow::Error::new(err).context(msg_wait_connect_err(&component, url)) + ) + } + }; + + match component { + PolledComponent::Prometheus => { + response + .error_for_status() + .with_context(|| msg_wait_non_successful_response(&component))?; + return Ok(()); + } + PolledComponent::HealthCheck => { + if response.status().is_success() { + return Ok(()); + } + + if response.status() == StatusCode::SERVICE_UNAVAILABLE { + if verbose { + logger::debug(msg_wait_not_healthy(url)); + } + } else { + response + .error_for_status() + .with_context(|| msg_wait_non_successful_response(&component))?; + } + } + } + } + } +} diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs index 1855a5943dc7..7a998efedbf2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs @@ -3,22 +3,23 @@ use std::{borrow::Borrow, collections::HashMap, path::PathBuf, sync::Arc}; /// Consensus registry contract operations. /// Includes code duplicated from `zksync_node_consensus::registry::abi`. use anyhow::Context as _; -use common::{logger, wallets::Wallet}; +use common::{config::global_config, logger, wallets::Wallet}; use config::EcosystemConfig; use conv::*; use ethers::{ abi::Detokenize, contract::{FunctionCall, Multicall}, middleware::{Middleware, NonceManagerMiddleware, SignerMiddleware}, - providers::{Http, JsonRpcClient, PendingTransaction, Provider, RawCall as _}, + providers::{Http, JsonRpcClient, PendingTransaction, Provider, ProviderError, RawCall as _}, signers::{LocalWallet, Signer as _}, types::{Address, BlockId, H256}, }; +use tokio::time::MissedTickBehavior; use xshell::Shell; use zksync_consensus_crypto::ByteFmt; use zksync_consensus_roles::{attester, validator}; -use crate::{messages, utils::consensus::parse_attester_committee}; +use crate::{commands::args::WaitArgs, messages, utils::consensus::parse_attester_committee}; mod conv; mod proto; @@ -92,6 +93,8 @@ pub enum Command { SetAttesterCommittee(SetAttesterCommitteeCommand), /// Fetches the attester committee from the consensus registry contract. GetAttesterCommittee, + /// Wait until the consensus registry contract is deployed to L2. + WaitForRegistry(WaitArgs), } /// Collection of sent transactions. @@ -210,15 +213,18 @@ impl Setup { }) } + fn consensus_registry_addr(&self) -> anyhow::Result
{ + self.contracts + .l2 + .consensus_registry + .context(messages::MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED) + } + fn consensus_registry( &self, m: Arc, ) -> anyhow::Result> { - let addr = self - .contracts - .l2 - .consensus_registry - .context(messages::MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED)?; + let addr = self.consensus_registry_addr()?; Ok(abi::ConsensusRegistry::new(addr, m)) } @@ -276,6 +282,58 @@ impl Setup { parse_attester_committee(attesters).context("parse_attester_committee()") } + async fn wait_for_registry_contract_inner( + &self, + args: &WaitArgs, + verbose: bool, + ) -> anyhow::Result<()> { + let addr = self.consensus_registry_addr()?; + let provider = self.provider().context("provider()")?; + let mut interval = tokio::time::interval(args.poll_interval()); + interval.set_missed_tick_behavior(MissedTickBehavior::Skip); + + if verbose { + logger::debug(messages::msg_wait_consensus_registry_started_polling( + addr, + provider.url(), + )); + } + + loop { + interval.tick().await; + + let code = match provider.get_code(addr, None).await { + Ok(code) => code, + Err(ProviderError::HTTPError(err)) if err.is_connect() || err.is_timeout() => { + continue; + } + Err(err) => { + return Err(anyhow::Error::new(err) + .context(messages::MSG_CONSENSUS_REGISTRY_POLL_ERROR)) + } + }; + if !code.is_empty() { + logger::info(messages::msg_consensus_registry_wait_success( + addr, + code.len(), + )); + return Ok(()); + } + } + } + + async fn wait_for_registry_contract( + &self, + args: &WaitArgs, + verbose: bool, + ) -> anyhow::Result<()> { + args.poll_with_timeout( + messages::MSG_CONSENSUS_REGISTRY_WAIT_COMPONENT, + self.wait_for_registry_contract_inner(args, verbose), + ) + .await + } + async fn set_attester_committee(&self, want: &attester::Committee) -> anyhow::Result<()> { let provider = self.provider().context("provider()")?; let block_id = self.last_block(&provider).await.context("last_block()")?; @@ -410,6 +468,10 @@ impl Command { let got = setup.get_attester_committee().await?; print_attesters(&got); } + Self::WaitForRegistry(args) => { + let verbose = global_config().verbose; + setup.wait_for_registry_contract(&args, verbose).await?; + } } Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs new file mode 100644 index 000000000000..0ba72f6b2257 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs @@ -0,0 +1,26 @@ +use anyhow::Context; +use common::{cmd::Cmd, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::{ + MSG_BUILDING_CONTRACT_VERIFIER, MSG_CHAIN_NOT_FOUND_ERR, + MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR, +}; + +pub(crate) async fn build(shell: &Shell) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let _dir_guard = shell.push_dir(&chain.link_to_code); + + logger::info(MSG_BUILDING_CONTRACT_VERIFIER); + + let mut cmd = Cmd::new(cmd!( + shell, + "cargo build --release --bin zksync_contract_verifier" + )); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs index 78bdc5fae7ec..e36e6ba62e7b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs @@ -1,22 +1,32 @@ -use args::init::InitContractVerifierArgs; use clap::Subcommand; use xshell::Shell; -pub mod args; -pub mod init; -pub mod run; +use self::args::init::InitContractVerifierArgs; +use crate::commands::args::WaitArgs; + +mod args; +mod build; +mod init; +mod run; +mod wait; #[derive(Subcommand, Debug)] pub enum ContractVerifierCommands { + /// Build contract verifier binary + Build, /// Run contract verifier Run, + /// Wait for contract verifier to start + Wait(WaitArgs), /// Download required binaries for contract verifier Init(InitContractVerifierArgs), } pub(crate) async fn run(shell: &Shell, args: ContractVerifierCommands) -> anyhow::Result<()> { match args { + ContractVerifierCommands::Build => build::build(shell).await, ContractVerifierCommands::Run => run::run(shell).await, + ContractVerifierCommands::Wait(args) => wait::wait(shell, args).await, ContractVerifierCommands::Init(args) => init::run(shell, args).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs index 9913ec817e90..ebc33840bdea 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs @@ -22,7 +22,7 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { let mut cmd = Cmd::new(cmd!( shell, - "cargo run --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" + "cargo run --release --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" )); cmd = cmd.with_force_run(); cmd.run().context(MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR) diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs new file mode 100644 index 000000000000..011c888d3041 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs @@ -0,0 +1,27 @@ +use anyhow::Context as _; +use common::{config::global_config, logger}; +use config::EcosystemConfig; +use xshell::Shell; + +use crate::{commands::args::WaitArgs, messages::MSG_CHAIN_NOT_FOUND_ERR}; + +pub(crate) async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let verbose = global_config().verbose; + + let prometheus_port = chain + .get_general_config()? + .contract_verifier + .as_ref() + .context("contract verifier config not specified")? 
+ .prometheus_port; + logger::info("Waiting for contract verifier to become alive"); + args.poll_prometheus(prometheus_port, verbose).await?; + logger::info(format!( + "Contract verifier is alive with Prometheus server bound to :{prometheus_port}" + )); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs new file mode 100644 index 000000000000..ff15c0c77f30 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs @@ -0,0 +1,23 @@ +use anyhow::Context; +use common::{cmd::Cmd, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::{MSG_BUILDING_EN, MSG_CHAIN_NOT_FOUND_ERR, MSG_FAILED_TO_BUILD_EN_ERR}; + +pub(crate) async fn build(shell: &Shell) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let _dir_guard = shell.push_dir(&chain.link_to_code); + + logger::info(MSG_BUILDING_EN); + + let mut cmd = Cmd::new(cmd!( + shell, + "cargo build --release --bin zksync_external_node" + )); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_EN_ERR) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs index 095566d24e87..7bd366d5871c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs @@ -1,12 +1,16 @@ -use args::{prepare_configs::PrepareConfigArgs, run::RunExternalNodeArgs}; use clap::Parser; use serde::{Deserialize, Serialize}; use xshell::Shell; +use self::args::{prepare_configs::PrepareConfigArgs, run::RunExternalNodeArgs}; +use crate::commands::args::WaitArgs; + mod args; +mod build; mod init; mod prepare_configs; mod run; +mod wait; #[derive(Debug, Serialize, Deserialize, Parser)] pub enum ExternalNodeCommands { @@ -14,14 +18,20 @@ pub enum ExternalNodeCommands { Configs(PrepareConfigArgs), /// Init databases Init, + /// Build external node + Build, /// Run external node Run(RunExternalNodeArgs), + /// Wait for external node to start + Wait(WaitArgs), } pub async fn run(shell: &Shell, commands: ExternalNodeCommands) -> anyhow::Result<()> { match commands { ExternalNodeCommands::Configs(args) => prepare_configs::run(shell, args), ExternalNodeCommands::Init => init::run(shell).await, + ExternalNodeCommands::Build => build::build(shell).await, ExternalNodeCommands::Run(args) => run::run(shell, args).await, + ExternalNodeCommands::Wait(args) => wait::wait(shell, args).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs new file mode 100644 index 000000000000..72568c36f363 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs @@ -0,0 +1,35 @@ +use anyhow::Context as _; +use common::{config::global_config, logger}; +use config::{traits::ReadConfigWithBasePath, EcosystemConfig}; +use xshell::Shell; +use zksync_config::configs::GeneralConfig; + +use crate::{ + commands::args::WaitArgs, + messages::{msg_waiting_for_en_success, MSG_CHAIN_NOT_INITIALIZED, MSG_WAITING_FOR_EN}, +}; + +pub async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_current_chain() + .context(MSG_CHAIN_NOT_INITIALIZED)?; + let 
verbose = global_config().verbose; + + let en_path = chain_config + .external_node_config_path + .clone() + .context("External node is not initialized")?; + let general_config = GeneralConfig::read_with_base_path(shell, &en_path)?; + let health_check_port = general_config + .api_config + .as_ref() + .context("no API config")? + .healthcheck + .port; + + logger::info(MSG_WAITING_FOR_EN); + args.poll_health_check(health_check_port, verbose).await?; + logger::info(msg_waiting_for_en_success(health_check_port)); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/server.rs b/zkstack_cli/crates/zkstack/src/commands/server.rs index be7a676a8252..10f267fb8526 100644 --- a/zkstack_cli/crates/zkstack/src/commands/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/server.rs @@ -1,5 +1,7 @@ use anyhow::Context; use common::{ + cmd::Cmd, + config::global_config, logger, server::{Server, ServerMode}, }; @@ -7,25 +9,38 @@ use config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; -use xshell::Shell; +use xshell::{cmd, Shell}; use crate::{ - commands::args::RunServerArgs, - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_FAILED_TO_RUN_SERVER_ERR, MSG_STARTING_SERVER}, + commands::args::{RunServerArgs, ServerArgs, ServerCommand, WaitArgs}, + messages::{ + msg_waiting_for_server_success, MSG_BUILDING_SERVER, MSG_CHAIN_NOT_INITIALIZED, + MSG_FAILED_TO_BUILD_SERVER_ERR, MSG_FAILED_TO_RUN_SERVER_ERR, MSG_STARTING_SERVER, + MSG_WAITING_FOR_SERVER, + }, }; -pub fn run(shell: &Shell, args: RunServerArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: ServerArgs) -> anyhow::Result<()> { let ecosystem_config = EcosystemConfig::from_file(shell)?; - let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; - logger::info(MSG_STARTING_SERVER); + match ServerCommand::from(args) { + ServerCommand::Run(args) => run_server(args, &chain_config, shell), + ServerCommand::Build => build_server(&chain_config, shell), + ServerCommand::Wait(args) => wait_for_server(args, &chain_config).await, + } +} - run_server(args, &chain_config, shell)?; +fn build_server(chain_config: &ChainConfig, shell: &Shell) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(&chain_config.link_to_code); - Ok(()) + logger::info(MSG_BUILDING_SERVER); + + let mut cmd = Cmd::new(cmd!(shell, "cargo build --release --bin zksync_server")); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_SERVER_ERR) } fn run_server( @@ -33,17 +48,13 @@ fn run_server( chain_config: &ChainConfig, shell: &Shell, ) -> anyhow::Result<()> { + logger::info(MSG_STARTING_SERVER); let server = Server::new( args.components.clone(), chain_config.link_to_code.clone(), args.uring, ); - if args.build { - server.build(shell)?; - return Ok(()); - } - let mode = if args.genesis { ServerMode::Genesis } else { @@ -62,3 +73,20 @@ fn run_server( ) .context(MSG_FAILED_TO_RUN_SERVER_ERR) } + +async fn wait_for_server(args: WaitArgs, chain_config: &ChainConfig) -> anyhow::Result<()> { + let verbose = global_config().verbose; + + let health_check_port = chain_config + .get_general_config()? + .api_config + .as_ref() + .context("no API config")? 
+ .healthcheck + .port; + + logger::info(MSG_WAITING_FOR_SERVER); + args.poll_health_check(health_check_port, verbose).await?; + logger::info(msg_waiting_for_server_success(health_check_port)); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/main.rs b/zkstack_cli/crates/zkstack/src/main.rs index 3ebe26a4fa21..8a115201fc81 100644 --- a/zkstack_cli/crates/zkstack/src/main.rs +++ b/zkstack_cli/crates/zkstack/src/main.rs @@ -15,7 +15,7 @@ use config::EcosystemConfig; use xshell::Shell; use crate::commands::{ - args::RunServerArgs, chain::ChainCommands, consensus, ecosystem::EcosystemCommands, + args::ServerArgs, chain::ChainCommands, consensus, ecosystem::EcosystemCommands, explorer::ExplorerCommands, external_node::ExternalNodeCommands, prover::ProverCommands, }; @@ -57,7 +57,7 @@ pub enum ZkStackSubcommands { #[command(subcommand, alias = "p")] Prover(ProverCommands), /// Run server - Server(RunServerArgs), + Server(ServerArgs), /// External Node related commands #[command(subcommand, alias = "en")] ExternalNode(ExternalNodeCommands), @@ -136,7 +136,7 @@ async fn run_subcommand(zkstack_args: ZkStack) -> anyhow::Result<()> { ZkStackSubcommands::Chain(args) => commands::chain::run(&shell, *args).await?, ZkStackSubcommands::Dev(args) => commands::dev::run(&shell, args).await?, ZkStackSubcommands::Prover(args) => commands::prover::run(&shell, args).await?, - ZkStackSubcommands::Server(args) => commands::server::run(&shell, args)?, + ZkStackSubcommands::Server(args) => commands::server::run(&shell, args).await?, ZkStackSubcommands::Containers(args) => commands::containers::run(&shell, args)?, ZkStackSubcommands::ExternalNode(args) => { commands::external_node::run(&shell, args).await? diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index 516194ef721e..bedcb233b19f 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -1,9 +1,10 @@ -use std::path::Path; +use std::{fmt, path::Path, time::Duration}; use ethers::{ - types::{H160, U256}, + types::{Address, H160, U256}, utils::format_ether, }; +use url::Url; use zksync_consensus_roles::attester; pub(super) const MSG_SETUP_KEYS_DOWNLOAD_SELECTION_PROMPT: &str = @@ -264,7 +265,6 @@ pub(super) const MSG_ENABLE_CONSENSUS_HELP: &str = "Enable consensus"; pub(super) const MSG_SERVER_GENESIS_HELP: &str = "Run server in genesis mode"; pub(super) const MSG_SERVER_ADDITIONAL_ARGS_HELP: &str = "Additional arguments that can be passed through the CLI"; -pub(super) const MSG_SERVER_BUILD_HELP: &str = "Build server but don't run it"; pub(super) const MSG_SERVER_URING_HELP: &str = "Enables uring support for RocksDB"; /// Accept ownership related messages @@ -284,6 +284,13 @@ pub(super) const MSG_OBSERVABILITY_RUN_PROMPT: &str = "Do you want to run observ pub(super) const MSG_STARTING_SERVER: &str = "Starting server"; pub(super) const MSG_FAILED_TO_RUN_SERVER_ERR: &str = "Failed to start server"; pub(super) const MSG_PREPARING_EN_CONFIGS: &str = "Preparing External Node config"; +pub(super) const MSG_BUILDING_SERVER: &str = "Building server"; +pub(super) const MSG_FAILED_TO_BUILD_SERVER_ERR: &str = "Failed to build server"; +pub(super) const MSG_WAITING_FOR_SERVER: &str = "Waiting for server to start"; + +pub(super) fn msg_waiting_for_server_success(health_check_port: u16) -> String { + format!("Server is alive with health check server on :{health_check_port}") +} /// Portal related messages pub(super) const MSG_PORTAL_FAILED_TO_FIND_ANY_CHAIN_ERR: &str = 
@@ -351,7 +358,14 @@ pub(super) const MSG_CONSENSUS_CONFIG_MISSING_ERR: &str = "Consensus config is m pub(super) const MSG_CONSENSUS_SECRETS_MISSING_ERR: &str = "Consensus secrets config is missing"; pub(super) const MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR: &str = "Consensus node key is missing"; +pub(super) const MSG_BUILDING_EN: &str = "Building external node"; +pub(super) const MSG_FAILED_TO_BUILD_EN_ERR: &str = "Failed to build external node"; pub(super) const MSG_STARTING_EN: &str = "Starting external node"; +pub(super) const MSG_WAITING_FOR_EN: &str = "Waiting for external node to start"; + +pub(super) fn msg_waiting_for_en_success(health_check_port: u16) -> String { + format!("External node is alive with health check server on :{health_check_port}") +} /// Prover related messages pub(super) const MSG_GENERATING_SK_SPINNER: &str = "Generating setup keys..."; @@ -429,7 +443,10 @@ pub(super) fn msg_bucket_created(bucket_name: &str) -> String { } /// Contract verifier related messages +pub(super) const MSG_BUILDING_CONTRACT_VERIFIER: &str = "Building contract verifier"; pub(super) const MSG_RUNNING_CONTRACT_VERIFIER: &str = "Running contract verifier"; +pub(super) const MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR: &str = + "Failed to build contract verifier"; pub(super) const MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR: &str = "Failed to run contract verifier"; pub(super) const MSG_INVALID_ARCH_ERR: &str = "Invalid arch"; pub(super) const MSG_GET_ZKSOLC_RELEASES_ERR: &str = "Failed to get zksolc releases"; @@ -478,6 +495,34 @@ pub(super) const MSG_DIFF_EN_GENERAL_CONFIG: &str = "Added the following fields to the external node generalconfig:"; pub(super) const MSG_UPDATING_ERA_OBSERVABILITY_SPINNER: &str = "Updating era observability..."; +/// Wait-related messages +pub(super) const MSG_WAIT_TIMEOUT_HELP: &str = "Wait timeout in seconds"; +pub(super) const MSG_WAIT_POLL_INTERVAL_HELP: &str = "Poll interval in milliseconds"; + +pub(super) fn msg_wait_starting_polling( + component: &impl fmt::Display, + url: &str, + poll_interval: Duration, +) -> String { + format!("Starting polling {component} at `{url}` each {poll_interval:?}") +} + +pub(super) fn msg_wait_timeout(component: &impl fmt::Display) -> String { + format!("timed out polling {component}") +} + +pub(super) fn msg_wait_connect_err(component: &impl fmt::Display, url: &str) -> String { + format!("failed to connect to {component} at `{url}`") +} + +pub(super) fn msg_wait_non_successful_response(component: &impl fmt::Display) -> String { + format!("non-successful {component} response") +} + +pub(super) fn msg_wait_not_healthy(url: &str) -> String { + format!("Node at `{url}` is not healthy") +} + pub(super) fn msg_diff_genesis_config(chain: &str) -> String { format!( "Found differences between chain {chain} and era genesis configs. Consider updating the chain {chain} genesis config and re-running genesis. 
Diff:" @@ -516,9 +561,20 @@ pub(super) const MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED: &str = "consensus registry address not configured"; pub(super) const MSG_CONSENSUS_GENESIS_SPEC_ATTESTERS_MISSING_IN_GENERAL_YAML: &str = "consensus.genesis_spec.attesters missing in general.yaml"; +pub(super) const MSG_CONSENSUS_REGISTRY_POLL_ERROR: &str = "failed querying L2 node"; +pub(super) const MSG_CONSENSUS_REGISTRY_WAIT_COMPONENT: &str = "main node HTTP RPC"; + pub(super) fn msg_setting_attester_committee_failed( got: &attester::Committee, want: &attester::Committee, ) -> String { format!("setting attester committee failed: got {got:?}, want {want:?}") } + +pub(super) fn msg_wait_consensus_registry_started_polling(addr: Address, url: &Url) -> String { format!("Starting polling L2 HTTP RPC at {url} for code at {addr:?}") } + +pub(super) fn msg_consensus_registry_wait_success(addr: Address, code_len: usize) -> String { format!("Consensus registry is deployed at {addr:?}: {code_len} bytes") }

From f3a2517a132b036ca70bc18aa8ac9f6da1cbc049 Mon Sep 17 00:00:00 2001
From: perekopskiy <53865202+perekopskiy@users.noreply.github.com>
Date: Tue, 12 Nov 2024 18:43:07 +0200
Subject: [PATCH 23/23] feat(vm): add gateway changes to fast vm (#3236)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## What ❔

Ports the VM changes needed for the gateway protocol version to vm_fast. The code changes are mostly copied from vm_latest.

## Why ❔

vm_fast should support the new protocol version.

## Checklist

- [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs).
- [ ] Tests for the changes have been added / updated.
- [ ] Documentation comments have been added / updated.
- [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`.
--- core/lib/multivm/src/glue/tracers/mod.rs | 16 +-- core/lib/multivm/src/lib.rs | 2 +- core/lib/multivm/src/utils/mod.rs | 18 +-- .../vm_fast/bootloader_state/state.rs | 36 ++++- .../vm_fast/bootloader_state/utils.rs | 70 +++++++--- core/lib/multivm/src/versions/vm_fast/mod.rs | 3 +- .../multivm/src/versions/vm_fast/pubdata.rs | 123 ------------------ .../multivm/src/versions/vm_fast/tests/mod.rs | 2 +- .../multivm/src/versions/vm_fast/version.rs | 28 ++++ core/lib/multivm/src/versions/vm_fast/vm.rs | 88 ++++++++----- .../src/versions/vm_latest/constants.rs | 22 ++-- .../lib/multivm/src/versions/vm_latest/mod.rs | 2 +- .../src/versions/vm_latest/tests/constants.rs | 2 +- .../src/versions/vm_latest/tests/mod.rs | 2 +- .../vm_latest/tracers/default_tracers.rs | 6 +- .../vm_latest/tracers/pubdata_tracer.rs | 8 +- .../src/versions/vm_latest/tracers/refunds.rs | 6 +- .../vm_latest/tracers/result_tracer.rs | 8 +- .../src/versions/vm_latest/tracers/utils.rs | 10 +- core/lib/multivm/src/versions/vm_latest/vm.rs | 10 +- .../src/versions/vm_m5/oracle_tools.rs | 4 +- .../src/versions/vm_m5/oracles/storage.rs | 10 +- core/lib/multivm/src/versions/vm_m5/vm.rs | 8 +- .../multivm/src/versions/vm_m5/vm_instance.rs | 8 +- .../src/versions/vm_m5/vm_with_bootloader.rs | 8 +- core/lib/multivm/src/versions/vm_m6/vm.rs | 8 +- .../multivm/src/versions/vm_m6/vm_instance.rs | 4 +- .../src/versions/vm_m6/vm_with_bootloader.rs | 18 +-- core/lib/multivm/src/vm_instance.rs | 20 ++- core/lib/vm_executor/src/batch/factory.rs | 2 +- core/lib/vm_executor/src/oneshot/contracts.rs | 14 +- core/lib/vm_executor/src/oneshot/mod.rs | 4 +- core/node/api_server/src/tx_sender/mod.rs | 6 +- core/node/consensus/src/vm.rs | 4 +- core/node/genesis/src/utils.rs | 4 +- 35 files changed, 278 insertions(+), 306 deletions(-) delete mode 100644 core/lib/multivm/src/versions/vm_fast/pubdata.rs create mode 100644 core/lib/multivm/src/versions/vm_fast/version.rs diff --git a/core/lib/multivm/src/glue/tracers/mod.rs b/core/lib/multivm/src/glue/tracers/mod.rs index bf2f67cae501..f5a854ecbaaf 100644 --- a/core/lib/multivm/src/glue/tracers/mod.rs +++ b/core/lib/multivm/src/glue/tracers/mod.rs @@ -7,7 +7,7 @@ //! Different VM versions may have distinct requirements and types for Tracers. To accommodate these differences, //! this module defines one primary trait: //! -//! - `MultiVMTracer`: This trait represents a tracer that can be converted into a tracer for +//! - `MultiVmTracer`: This trait represents a tracer that can be converted into a tracer for //! a specific VM version. //! //! Specific traits for each VM version, which support Custom Tracers: @@ -19,22 +19,22 @@ //! into a form compatible with the vm_virtual_blocks version. //! It defines a method `vm_virtual_blocks` for obtaining a boxed tracer. //! -//! For `MultiVMTracer` to be implemented, the Tracer must implement all N currently +//! For `MultiVmTracer` to be implemented, the Tracer must implement all N currently //! existing sub-traits. //! //! ## Adding a new VM version //! -//! To add support for one more VM version to MultiVMTracer, one needs to: +//! To add support for one more VM version to MultiVmTracer, one needs to: //! - Create a new trait performing conversion to the specified VM tracer, e.g., `IntoTracer`. -//! - Add this trait as a trait bound to the `MultiVMTracer`. -//! - Add this trait as a trait bound for `T` in `MultiVMTracer` implementation. +//! - Add this trait as a trait bound to the `MultiVmTracer`. +//! 
- Add this trait as a trait bound for `T` in `MultiVmTracer` implementation. //! - Implement the trait for `T` with a bound to `VmTracer` for a specific version. use crate::{interface::storage::WriteStorage, tracers::old::OldTracers, HistoryMode}; -pub type MultiVmTracerPointer = Box>; +pub type MultiVmTracerPointer = Box>; -pub trait MultiVMTracer: +pub trait MultiVmTracer: IntoLatestTracer + IntoVmVirtualBlocksTracer + IntoVmRefundsEnhancementTracer @@ -168,7 +168,7 @@ where } } -impl MultiVMTracer for T +impl MultiVmTracer for T where S: WriteStorage, H: HistoryMode, diff --git a/core/lib/multivm/src/lib.rs b/core/lib/multivm/src/lib.rs index 1cba2c0fb92b..fc4085d9b021 100644 --- a/core/lib/multivm/src/lib.rs +++ b/core/lib/multivm/src/lib.rs @@ -10,7 +10,7 @@ pub use zksync_vm_interface as interface; pub use crate::{ glue::{ history_mode::HistoryMode, - tracers::{MultiVMTracer, MultiVmTracerPointer}, + tracers::{MultiVmTracer, MultiVmTracerPointer}, }, versions::{ vm_1_3_2, vm_1_4_1, vm_1_4_2, vm_boojum_integration, vm_fast, vm_latest, vm_m5, vm_m6, diff --git a/core/lib/multivm/src/utils/mod.rs b/core/lib/multivm/src/utils/mod.rs index a55adb16c85a..4332c0327ff1 100644 --- a/core/lib/multivm/src/utils/mod.rs +++ b/core/lib/multivm/src/utils/mod.rs @@ -239,16 +239,16 @@ pub fn get_bootloader_encoding_space(version: VmVersion) -> u32 { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::BOOTLOADER_TX_ENCODING_SPACE, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } @@ -394,16 +394,16 @@ pub fn get_used_bootloader_memory_bytes(version: VmVersion) -> usize { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::USED_BOOTLOADER_MEMORY_BYTES, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } @@ -430,16 +430,16 @@ pub fn get_used_bootloader_memory_words(version: VmVersion) -> usize { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::USED_BOOTLOADER_MEMORY_WORDS, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + 
crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs index 15b4daf02a77..e104eba6ef4f 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs @@ -1,7 +1,7 @@ use std::cmp::Ordering; use once_cell::sync::OnceCell; -use zksync_types::{L2ChainId, U256}; +use zksync_types::{L2ChainId, ProtocolVersionId, U256}; use super::{ l2_block::BootloaderL2Block, @@ -10,8 +10,11 @@ use super::{ BootloaderStateSnapshot, }; use crate::{ - interface::{BootloaderMemory, CompressedBytecodeInfo, L2BlockEnv, TxExecutionMode}, - versions::vm_fast::{pubdata::PubdataInput, transaction_data::TransactionData}, + interface::{ + pubdata::{PubdataBuilder, PubdataInput}, + BootloaderMemory, CompressedBytecodeInfo, L2BlockEnv, TxExecutionMode, + }, + versions::vm_fast::transaction_data::TransactionData, vm_latest::{constants::TX_DESCRIPTION_OFFSET, utils::l2_blocks::assert_next_block}, }; @@ -42,6 +45,8 @@ pub struct BootloaderState { free_tx_offset: usize, /// Information about the pubdata that will be needed to supply to the L1Messenger pubdata_information: OnceCell, + /// Protocol version. + protocol_version: ProtocolVersionId, } impl BootloaderState { @@ -49,6 +54,7 @@ impl BootloaderState { execution_mode: TxExecutionMode, initial_memory: BootloaderMemory, first_l2_block: L2BlockEnv, + protocol_version: ProtocolVersionId, ) -> Self { let l2_block = BootloaderL2Block::new(first_l2_block, 0); Self { @@ -59,6 +65,7 @@ impl BootloaderState { execution_mode, free_tx_offset: 0, pubdata_information: Default::default(), + protocol_version, } } @@ -139,12 +146,23 @@ impl BootloaderState { .expect("Pubdata information is not set") } + pub(crate) fn settlement_layer_pubdata(&self, pubdata_builder: &dyn PubdataBuilder) -> Vec { + let pubdata_information = self + .pubdata_information + .get() + .expect("Pubdata information is not set"); + pubdata_builder.settlement_layer_pubdata(pubdata_information, self.protocol_version) + } + fn last_mut_l2_block(&mut self) -> &mut BootloaderL2Block { self.l2_blocks.last_mut().unwrap() } /// Apply all bootloader transaction to the initial memory - pub(crate) fn bootloader_memory(&self) -> BootloaderMemory { + pub(crate) fn bootloader_memory( + &self, + pubdata_builder: &dyn PubdataBuilder, + ) -> BootloaderMemory { let mut initial_memory = self.initial_memory.clone(); let mut offset = 0; let mut compressed_bytecodes_offset = 0; @@ -172,11 +190,15 @@ impl BootloaderState { let pubdata_information = self .pubdata_information - .clone() - .into_inner() + .get() .expect("Empty pubdata information"); - apply_pubdata_to_memory(&mut initial_memory, pubdata_information); + apply_pubdata_to_memory( + &mut initial_memory, + pubdata_builder, + pubdata_information, + self.protocol_version, + ); initial_memory } diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs index 838c2d6ba60f..9eb55d794235 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs @@ -1,10 +1,12 @@ -use zksync_types::{ethabi, 
h256_to_u256, U256}; +use zksync_types::{ethabi, h256_to_u256, ProtocolVersionId, U256}; use super::{l2_block::BootloaderL2Block, tx::BootloaderTx}; use crate::{ - interface::{BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode}, + interface::{ + pubdata::{PubdataBuilder, PubdataInput}, + BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode, + }, utils::bytecode, - versions::vm_fast::pubdata::PubdataInput, vm_latest::constants::{ BOOTLOADER_TX_DESCRIPTION_OFFSET, BOOTLOADER_TX_DESCRIPTION_SIZE, COMPRESSED_BYTECODES_OFFSET, OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET, @@ -118,26 +120,54 @@ fn apply_l2_block_inner( ]) } +fn bootloader_memory_input( + pubdata_builder: &dyn PubdataBuilder, + input: &PubdataInput, + protocol_version: ProtocolVersionId, +) -> Vec { + let l2_da_validator_address = pubdata_builder.l2_da_validator(); + let operator_input = pubdata_builder.l1_messenger_operator_input(input, protocol_version); + ethabi::encode(&[ + ethabi::Token::Address(l2_da_validator_address), + ethabi::Token::Bytes(operator_input), + ]) +} + pub(crate) fn apply_pubdata_to_memory( memory: &mut BootloaderMemory, - pubdata_information: PubdataInput, + pubdata_builder: &dyn PubdataBuilder, + pubdata_information: &PubdataInput, + protocol_version: ProtocolVersionId, ) { - // Skipping two slots as they will be filled by the bootloader itself: - // - One slot is for the selector of the call to the L1Messenger. - // - The other slot is for the 0x20 offset for the calldata. - let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 2; - - // Need to skip first word as it represents array offset - // while bootloader expects only [len || data] - let pubdata = ethabi::encode(&[ethabi::Token::Bytes( - pubdata_information.build_pubdata(true), - )])[32..] - .to_vec(); - - assert!( - pubdata.len() / 32 <= OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS - 2, - "The encoded pubdata is too big" - ); + let (l1_messenger_pubdata_start_slot, pubdata) = if protocol_version.is_pre_gateway() { + // Skipping two slots as they will be filled by the bootloader itself: + // - One slot is for the selector of the call to the L1Messenger. + // - The other slot is for the 0x20 offset for the calldata. + let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 2; + // Need to skip first word as it represents array offset + // while bootloader expects only [len || data] + let pubdata = ethabi::encode(&[ethabi::Token::Bytes( + pubdata_builder.l1_messenger_operator_input(pubdata_information, protocol_version), + )])[32..] + .to_vec(); + assert!( + pubdata.len() / 32 <= OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS - 2, + "The encoded pubdata is too big" + ); + (l1_messenger_pubdata_start_slot, pubdata) + } else { + // Skipping the first slot as it will be filled by the bootloader itself: + // It is for the selector of the call to the L1Messenger. 
+ let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 1; + let pubdata = + bootloader_memory_input(pubdata_builder, pubdata_information, protocol_version); + assert!( + // Note that unlike the previous version, the difference is `1`, since now it also includes the offset + pubdata.len() / 32 < OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS, + "The encoded pubdata is too big" + ); + (l1_messenger_pubdata_start_slot, pubdata) + }; pubdata .chunks(32) diff --git a/core/lib/multivm/src/versions/vm_fast/mod.rs b/core/lib/multivm/src/versions/vm_fast/mod.rs index de6e7bd4ef6a..840653b63b08 100644 --- a/core/lib/multivm/src/versions/vm_fast/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/mod.rs @@ -1,5 +1,6 @@ pub use zksync_vm2::interface; +pub(crate) use self::version::FastVmVersion; pub use self::vm::Vm; mod bootloader_state; @@ -10,10 +11,10 @@ mod evm_deploy_tracer; mod glue; mod hook; mod initial_bootloader_memory; -mod pubdata; mod refund; #[cfg(test)] mod tests; mod transaction_data; mod utils; +mod version; mod vm; diff --git a/core/lib/multivm/src/versions/vm_fast/pubdata.rs b/core/lib/multivm/src/versions/vm_fast/pubdata.rs deleted file mode 100644 index f938696297b5..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/pubdata.rs +++ /dev/null @@ -1,123 +0,0 @@ -use zksync_types::writes::{compress_state_diffs, StateDiffRecord}; - -use crate::interface::pubdata::L1MessengerL2ToL1Log; - -/// Struct based on which the pubdata blob is formed -#[derive(Debug, Clone, Default)] -pub(crate) struct PubdataInput { - pub(crate) user_logs: Vec, - pub(crate) l2_to_l1_messages: Vec>, - pub(crate) published_bytecodes: Vec>, - pub(crate) state_diffs: Vec, -} - -impl PubdataInput { - pub(crate) fn build_pubdata(self, with_uncompressed_state_diffs: bool) -> Vec { - let mut l1_messenger_pubdata = vec![]; - - let PubdataInput { - user_logs, - l2_to_l1_messages, - published_bytecodes, - state_diffs, - } = self; - - // Encoding user L2->L1 logs. - // Format: `[(numberOfL2ToL1Logs as u32) || l2tol1logs[1] || ... || l2tol1logs[n]]` - l1_messenger_pubdata.extend((user_logs.len() as u32).to_be_bytes()); - for l2tol1log in user_logs { - l1_messenger_pubdata.extend(l2tol1log.packed_encoding()); - } - - // Encoding L2->L1 messages - // Format: `[(numberOfMessages as u32) || (messages[1].len() as u32) || messages[1] || ... || (messages[n].len() as u32) || messages[n]]` - l1_messenger_pubdata.extend((l2_to_l1_messages.len() as u32).to_be_bytes()); - for message in l2_to_l1_messages { - l1_messenger_pubdata.extend((message.len() as u32).to_be_bytes()); - l1_messenger_pubdata.extend(message); - } - - // Encoding bytecodes - // Format: `[(numberOfBytecodes as u32) || (bytecodes[1].len() as u32) || bytecodes[1] || ... 
|| (bytecodes[n].len() as u32) || bytecodes[n]]` - l1_messenger_pubdata.extend((published_bytecodes.len() as u32).to_be_bytes()); - for bytecode in published_bytecodes { - l1_messenger_pubdata.extend((bytecode.len() as u32).to_be_bytes()); - l1_messenger_pubdata.extend(bytecode); - } - - // Encoding state diffs - // Format: `[size of compressed state diffs u32 || compressed state diffs || (# state diffs: intial + repeated) as u32 || sorted state diffs by ]` - let state_diffs_compressed = compress_state_diffs(state_diffs.clone()); - l1_messenger_pubdata.extend(state_diffs_compressed); - - if with_uncompressed_state_diffs { - l1_messenger_pubdata.extend((state_diffs.len() as u32).to_be_bytes()); - for state_diff in state_diffs { - l1_messenger_pubdata.extend(state_diff.encode_padded()); - } - } - - l1_messenger_pubdata - } -} - -#[cfg(test)] -mod tests { - use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_types::u256_to_h256; - - use super::*; - - #[test] - fn test_basic_pubdata_building() { - // Just using some constant addresses for tests - let addr1 = BOOTLOADER_ADDRESS; - let addr2 = ACCOUNT_CODE_STORAGE_ADDRESS; - - let user_logs = vec![L1MessengerL2ToL1Log { - l2_shard_id: 0, - is_service: false, - tx_number_in_block: 0, - sender: addr1, - key: 1.into(), - value: 128.into(), - }]; - - let l2_to_l1_messages = vec![hex::decode("deadbeef").unwrap()]; - - let published_bytecodes = vec![hex::decode("aaaabbbb").unwrap()]; - - // For covering more cases, we have two state diffs: - // One with enumeration index present (and so it is a repeated write) and the one without it. - let state_diffs = vec![ - StateDiffRecord { - address: addr2, - key: 155.into(), - derived_key: u256_to_h256(125.into()).0, - enumeration_index: 12, - initial_value: 11.into(), - final_value: 12.into(), - }, - StateDiffRecord { - address: addr2, - key: 156.into(), - derived_key: u256_to_h256(126.into()).0, - enumeration_index: 0, - initial_value: 0.into(), - final_value: 14.into(), - }, - ]; - - let input = PubdataInput { - user_logs, - l2_to_l1_messages, - published_bytecodes, - state_diffs, - }; - - let pubdata = - ethabi::encode(&[ethabi::Token::Bytes(input.build_pubdata(true))])[32..].to_vec(); - - assert_eq!(hex::encode(pubdata), 
"00000000000000000000000000000000000000000000000000000000000002c700000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000004aaaabbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901000000020000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009b000000000000000000000000000000000000000000000000000000000000007d000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009c000000000000000000000000000000000000000000000000000000000000007e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs index e00a71a43c36..0a26e895b5a7 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs @@ -112,7 +112,7 @@ impl TestedVm for Vm> { } fn finish_batch_without_pubdata(&mut self) -> VmExecutionResultAndLogs { - self.inspect_inner(&mut Default::default(), VmExecutionMode::Batch) + self.inspect_inner(&mut Default::default(), VmExecutionMode::Batch, None) } fn insert_bytecodes(&mut self, bytecodes: &[&[u8]]) { diff --git a/core/lib/multivm/src/versions/vm_fast/version.rs b/core/lib/multivm/src/versions/vm_fast/version.rs new file mode 100644 index 000000000000..8da180d8ba59 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_fast/version.rs @@ -0,0 +1,28 @@ +use crate::{vm_latest::MultiVmSubversion, VmVersion}; + +#[derive(Debug, Copy, Clone)] +pub(crate) enum FastVmVersion { + IncreasedBootloaderMemory, + Gateway, +} + +impl From for MultiVmSubversion { + fn from(value: FastVmVersion) -> Self { + match value { + FastVmVersion::IncreasedBootloaderMemory => Self::IncreasedBootloaderMemory, + FastVmVersion::Gateway => Self::Gateway, + } + } +} + +impl TryFrom for FastVmVersion { + type Error = (); + + fn try_from(value: VmVersion) -> Result { + match value { + VmVersion::Vm1_5_0IncreasedBootloaderMemory => Ok(Self::IncreasedBootloaderMemory), + VmVersion::VmGateway => Ok(Self::Gateway), + _ => Err(()), + } + } +} diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index 2aab2bfc7d96..a91f0831ebbf 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -23,7 +23,6 @@ use zksync_vm2::{ interface::{CallframeInterface, HeapId, StateInterface, Tracer}, ExecutionEnd, FatPointer, Program, Settings, StorageSlot, VirtualMachine, }; -use zksync_vm_interface::{pubdata::PubdataBuilder, 
InspectExecutionMode}; use super::{ bootloader_state::{BootloaderState, BootloaderStateSnapshot}, @@ -37,32 +36,28 @@ use super::{ use crate::{ glue::GlueInto, interface::{ + pubdata::{PubdataBuilder, PubdataInput}, storage::{ImmutableStorageView, ReadStorage, StoragePtr, StorageView}, BytecodeCompressionError, BytecodeCompressionResult, CurrentExecutionState, - ExecutionResult, FinishedL1Batch, Halt, L1BatchEnv, L2BlockEnv, PushTransactionResult, - Refunds, SystemEnv, TxRevertReason, VmEvent, VmExecutionLogs, VmExecutionMode, - VmExecutionResultAndLogs, VmExecutionStatistics, VmFactory, VmInterface, + ExecutionResult, FinishedL1Batch, Halt, InspectExecutionMode, L1BatchEnv, L2BlockEnv, + PushTransactionResult, Refunds, SystemEnv, TxRevertReason, VmEvent, VmExecutionLogs, + VmExecutionMode, VmExecutionResultAndLogs, VmExecutionStatistics, VmFactory, VmInterface, VmInterfaceHistoryEnabled, VmRevertReason, VmTrackingContracts, }, - is_supported_by_fast_vm, utils::events::extract_l2tol1logs_from_l1_messenger, vm_fast::{ bootloader_state::utils::{apply_l2_block, apply_pubdata_to_memory}, events::merge_events, - pubdata::PubdataInput, refund::compute_refund, + version::FastVmVersion, }, - vm_latest::{ - constants::{ - get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, - OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, - }, - MultiVMSubversion, + vm_latest::constants::{ + get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, + OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, }, + VmVersion, }; -const VM_VERSION: MultiVMSubversion = MultiVMSubversion::IncreasedBootloaderMemory; - type FullTracer = ((Tr, CircuitsTracer), EvmDeployTracer); #[derive(Debug)] @@ -103,17 +98,21 @@ pub struct Vm { pub(super) batch_env: L1BatchEnv, pub(super) system_env: SystemEnv, snapshot: Option, + vm_version: FastVmVersion, #[cfg(test)] enforced_state_diffs: Option>, } impl Vm { pub fn custom(batch_env: L1BatchEnv, system_env: SystemEnv, storage: S) -> Self { - assert!( - is_supported_by_fast_vm(system_env.version), - "Protocol version {:?} is not supported by fast VM", - system_env.version - ); + let vm_version: FastVmVersion = VmVersion::from(system_env.version) + .try_into() + .unwrap_or_else(|_| { + panic!( + "Protocol version {:?} is not supported by fast VM", + system_env.version + ) + }); let default_aa_code_hash = system_env.base_system_smart_contracts.default_aa.hash; let evm_emulator_hash = system_env @@ -147,7 +146,7 @@ impl Vm { Settings { default_aa_code_hash: default_aa_code_hash.into(), evm_interpreter_code_hash: evm_emulator_hash.into(), - hook_address: get_vm_hook_position(VM_VERSION) * 32, + hook_address: get_vm_hook_position(vm_version.into()) * 32, }, ); @@ -167,10 +166,12 @@ impl Vm { system_env.execution_mode, bootloader_memory.clone(), batch_env.first_l2_block, + system_env.version, ), system_env, batch_env, snapshot: None, + vm_version, #[cfg(test)] enforced_state_diffs: None, }; @@ -183,6 +184,7 @@ impl Vm { execution_mode: VmExecutionMode, tracer: &mut FullTracer, track_refunds: bool, + pubdata_builder: Option<&dyn PubdataBuilder>, ) -> VmRunResult { let mut refunds = Refunds { gas_refunded: 0, @@ -353,15 +355,19 @@ impl Vm { state_diffs: self.compute_state_diffs(), }; - // Save the pubdata for the future initial bootloader memory building - self.bootloader_state - .set_pubdata_input(pubdata_input.clone()); - // Apply the pubdata to the current memory let mut memory_to_apply = vec![]; 
- apply_pubdata_to_memory(&mut memory_to_apply, pubdata_input); + apply_pubdata_to_memory( + &mut memory_to_apply, + pubdata_builder.expect("`pubdata_builder` is required to finish batch"), + &pubdata_input, + self.system_env.version, + ); self.write_to_bootloader_heap(memory_to_apply); + + // Save the pubdata for the future initial bootloader memory building + self.bootloader_state.set_pubdata_input(pubdata_input); } Hook::PaymasterValidationEntered | Hook::ValidationStepEnded => { /* unused */ } @@ -386,8 +392,8 @@ impl Vm { } fn get_hook_params(&self) -> [U256; 3] { - (get_vm_hook_params_start_position(VM_VERSION) - ..get_vm_hook_params_start_position(VM_VERSION) + VM_HOOK_PARAMS_COUNT) + (get_vm_hook_params_start_position(self.vm_version.into()) + ..get_vm_hook_params_start_position(self.vm_version.into()) + VM_HOOK_PARAMS_COUNT) .map(|word| self.read_word_from_bootloader_heap(word as usize)) .collect::>() .try_into() @@ -396,7 +402,7 @@ impl Vm { fn get_tx_result(&self) -> U256 { let tx_idx = self.bootloader_state.current_tx(); - let slot = get_result_success_first_slot(VM_VERSION) as usize + tx_idx; + let slot = get_result_success_first_slot(self.vm_version.into()) as usize + tx_idx; self.read_word_from_bootloader_heap(slot) } @@ -578,6 +584,7 @@ impl Vm { &mut self, tracer: &mut Tr, execution_mode: VmExecutionMode, + pubdata_builder: Option<&dyn PubdataBuilder>, ) -> VmExecutionResultAndLogs { let mut track_refunds = false; if matches!(execution_mode, VmExecutionMode::OneTx) { @@ -593,7 +600,12 @@ impl Vm { (mem::take(tracer), CircuitsTracer::default()), EvmDeployTracer::new(self.world.dynamic_bytecodes.clone()), ); - let result = self.run(execution_mode, &mut full_tracer, track_refunds); + let result = self.run( + execution_mode, + &mut full_tracer, + track_refunds, + pubdata_builder, + ); let ((external_tracer, circuits_tracer), _) = full_tracer; *tracer = external_tracer; // place the tracer back @@ -712,7 +724,7 @@ impl VmInterface for Vm { tracer: &mut Self::TracerDispatcher, execution_mode: InspectExecutionMode, ) -> VmExecutionResultAndLogs { - self.inspect_inner(tracer, execution_mode.into()) + self.inspect_inner(tracer, execution_mode.into(), None) } fn inspect_transaction_with_bytecode_compression( @@ -739,19 +751,23 @@ impl VmInterface for Vm { self.bootloader_state.start_new_l2_block(l2_block_env) } - fn finish_batch(&mut self, _pubdata_builder: Rc) -> FinishedL1Batch { - let result = self.inspect_inner(&mut Tr::default(), VmExecutionMode::Batch); + fn finish_batch(&mut self, pubdata_builder: Rc) -> FinishedL1Batch { + let result = self.inspect_inner( + &mut Tr::default(), + VmExecutionMode::Batch, + Some(pubdata_builder.as_ref()), + ); let execution_state = self.get_current_execution_state(); - let bootloader_memory = self.bootloader_state.bootloader_memory(); + let bootloader_memory = self + .bootloader_state + .bootloader_memory(pubdata_builder.as_ref()); FinishedL1Batch { block_tip_execution_result: result, final_execution_state: execution_state, final_bootloader_memory: Some(bootloader_memory), pubdata_input: Some( self.bootloader_state - .get_pubdata_information() - .clone() - .build_pubdata(false), + .settlement_layer_pubdata(pubdata_builder.as_ref()), ), state_diffs: Some( self.bootloader_state diff --git a/core/lib/multivm/src/versions/vm_latest/constants.rs b/core/lib/multivm/src/versions/vm_latest/constants.rs index c047e6ffa3b0..c95771f9e849 100644 --- a/core/lib/multivm/src/versions/vm_latest/constants.rs +++ 
b/core/lib/multivm/src/versions/vm_latest/constants.rs @@ -5,7 +5,7 @@ pub use zk_evm_1_5_0::zkevm_opcode_defs::system_params::{ }; use zksync_system_constants::MAX_NEW_FACTORY_DEPS; -use super::vm::MultiVMSubversion; +use super::vm::MultiVmSubversion; use crate::vm_latest::old_vm::utils::heap_page_from_base; /// The amount of ergs to be reserved at the end of the batch to ensure that it has enough ergs to verify compression, etc. @@ -22,15 +22,15 @@ pub(crate) const MAX_BASE_LAYER_CIRCUITS: usize = 34100; /// the requirements on RAM. /// In this version of the VM the used bootloader memory bytes has increased from `30_000_000` to `59_000_000`, /// and then to `63_800_000` in a subsequent upgrade. -pub(crate) const fn get_used_bootloader_memory_bytes(subversion: MultiVMSubversion) -> usize { +pub(crate) const fn get_used_bootloader_memory_bytes(subversion: MultiVmSubversion) -> usize { match subversion { - MultiVMSubversion::SmallBootloaderMemory => 59_000_000, - MultiVMSubversion::IncreasedBootloaderMemory => 63_800_000, - MultiVMSubversion::Gateway => 63_800_000, + MultiVmSubversion::SmallBootloaderMemory => 59_000_000, + MultiVmSubversion::IncreasedBootloaderMemory => 63_800_000, + MultiVmSubversion::Gateway => 63_800_000, } } -pub(crate) const fn get_used_bootloader_memory_words(subversion: MultiVMSubversion) -> usize { +pub(crate) const fn get_used_bootloader_memory_words(subversion: MultiVmSubversion) -> usize { get_used_bootloader_memory_bytes(subversion) / 32 } @@ -105,7 +105,7 @@ pub(crate) const BOOTLOADER_TX_DESCRIPTION_OFFSET: usize = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS; /// The size of the bootloader memory dedicated to the encodings of transactions -pub(crate) const fn get_bootloader_tx_encoding_space(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_bootloader_tx_encoding_space(subversion: MultiVmSubversion) -> u32 { (get_used_bootloader_memory_words(subversion) - TX_DESCRIPTION_OFFSET - MAX_TXS_IN_BATCH) as u32 } @@ -129,21 +129,21 @@ pub const BOOTLOADER_HEAP_PAGE: u32 = heap_page_from_base(MemoryPage(INITIAL_BAS /// So the layout looks like this: /// `[param 0][param 1][param 2][vmhook opcode]` pub const VM_HOOK_PARAMS_COUNT: u32 = 3; -pub(crate) const fn get_vm_hook_position(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_vm_hook_position(subversion: MultiVmSubversion) -> u32 { get_result_success_first_slot(subversion) - 1 } -pub(crate) const fn get_vm_hook_params_start_position(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_vm_hook_params_start_position(subversion: MultiVmSubversion) -> u32 { get_vm_hook_position(subversion) - VM_HOOK_PARAMS_COUNT } /// Method that provides the start position of the vm hook in the memory for the latest version of v1.5.0. /// This method is used only in `test_infra` in the bootloader tests and that's why it should be exposed. 
pub const fn get_vm_hook_start_position_latest() -> u32 { - get_vm_hook_params_start_position(MultiVMSubversion::IncreasedBootloaderMemory) + get_vm_hook_params_start_position(MultiVmSubversion::IncreasedBootloaderMemory) } /// Arbitrary space in memory closer to the end of the page -pub(crate) const fn get_result_success_first_slot(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_result_success_first_slot(subversion: MultiVmSubversion) -> u32 { ((get_used_bootloader_memory_bytes(subversion) as u32) - (MAX_TXS_IN_BATCH as u32) * 32) / 32 } diff --git a/core/lib/multivm/src/versions/vm_latest/mod.rs b/core/lib/multivm/src/versions/vm_latest/mod.rs index 211c527c3816..46f8db789ddc 100644 --- a/core/lib/multivm/src/versions/vm_latest/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/mod.rs @@ -1,4 +1,4 @@ -pub(crate) use self::vm::MultiVMSubversion; +pub(crate) use self::vm::MultiVmSubversion; pub use self::{ bootloader_state::BootloaderState, old_vm::{ diff --git a/core/lib/multivm/src/versions/vm_latest/tests/constants.rs b/core/lib/multivm/src/versions/vm_latest/tests/constants.rs index 3b75bfd6d36b..8ee62650ca77 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/constants.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/constants.rs @@ -3,7 +3,7 @@ #[test] fn test_that_bootloader_encoding_space_is_large_enoguh() { let encoding_space = crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::latest(), + crate::vm_latest::MultiVmSubversion::latest(), ); assert!(encoding_space >= 330000, "Bootloader tx space is too small"); } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs index 51c9dde0dd56..fc226f03ecea 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs @@ -91,7 +91,7 @@ impl TestedVm for TestedLatestVm { self.batch_env.clone(), VmExecutionMode::Batch, diffs, - crate::vm_latest::MultiVMSubversion::latest(), + crate::vm_latest::MultiVmSubversion::latest(), Some(pubdata_builder), ); self.inspect_inner( diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs index 7156acce152e..8755b98ddb8c 100755 --- a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs @@ -32,7 +32,7 @@ use crate::{ CircuitsTracer, RefundsTracer, ResultTracer, }, types::internals::ZkSyncVmState, - vm::MultiVMSubversion, + vm::MultiVmSubversion, VmTracer, }, }; @@ -65,7 +65,7 @@ pub struct DefaultExecutionTracer { pub(crate) circuits_tracer: CircuitsTracer, // This tracer is responsible for handling EVM deployments and providing the data to the code decommitter. 
pub(crate) evm_deploy_tracer: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, storage: StoragePtr, _phantom: PhantomData, } @@ -80,7 +80,7 @@ impl DefaultExecutionTracer { storage: StoragePtr, refund_tracer: Option>, pubdata_tracer: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { Self { tx_has_been_processed: false, diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs index 4c71c3b2fc49..3698914630dd 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs @@ -33,7 +33,7 @@ use crate::{ tracers::{traits::VmTracer, utils::VmHook}, types::internals::ZkSyncVmState, utils::logs::collect_events_and_l1_system_logs_after_timestamp, - vm::MultiVMSubversion, + vm::MultiVmSubversion, StorageOracle, }, }; @@ -47,7 +47,7 @@ pub(crate) struct PubdataTracer { // For testing purposes it might be helpful to supply an exact set of state diffs to be provided // to the L1Messenger. enforced_state_diffs: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, _phantom_data: PhantomData, } @@ -56,7 +56,7 @@ impl PubdataTracer { pub(crate) fn new( l1_batch_env: L1BatchEnv, execution_mode: VmExecutionMode, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, ) -> Self { Self { @@ -77,7 +77,7 @@ impl PubdataTracer { l1_batch_env: L1BatchEnv, execution_mode: VmExecutionMode, forced_state_diffs: Vec, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, ) -> Self { Self { diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs index f3fc1b167b45..6ef251c2db98 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs @@ -24,7 +24,7 @@ use crate::{ }, types::internals::ZkSyncVmState, utils::fee::get_batch_base_fee, - vm::MultiVMSubversion, + vm::MultiVmSubversion, }, }; @@ -50,12 +50,12 @@ pub(crate) struct RefundsTracer { spent_pubdata_counter_before: u32, l1_batch: L1BatchEnv, pubdata_published: u32, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, _phantom: PhantomData, } impl RefundsTracer { - pub(crate) fn new(l1_batch: L1BatchEnv, subversion: MultiVMSubversion) -> Self { + pub(crate) fn new(l1_batch: L1BatchEnv, subversion: MultiVmSubversion) -> Self { Self { pending_refund_request: None, refund_gas: 0, diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs index 0687c8393c62..80a3147f65d2 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs @@ -23,7 +23,7 @@ use crate::{ utils::{get_vm_hook_params, read_pointer, VmHook}, }, types::internals::ZkSyncVmState, - vm::MultiVMSubversion, + vm::MultiVmSubversion, BootloaderState, HistoryMode, SimpleMemory, }, }; @@ -102,7 +102,7 @@ pub(crate) struct ResultTracer { execution_mode: VmExecutionMode, far_call_tracker: FarCallTracker, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pub(crate) tx_finished_in_one_tx_mode: bool, @@ -110,7 +110,7 @@ pub(crate) struct ResultTracer { } impl ResultTracer { - pub(crate) fn new(execution_mode: 
VmExecutionMode, subversion: MultiVMSubversion) -> Self { + pub(crate) fn new(execution_mode: VmExecutionMode, subversion: MultiVmSubversion) -> Self { Self { result: None, bootloader_out_of_gas: false, @@ -336,7 +336,7 @@ impl ResultTracer { pub(crate) fn tx_has_failed( state: &ZkSyncVmState, tx_id: u32, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> bool { let mem_slot = get_result_success_first_slot(subversion) + tx_id; let mem_value = state diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs index 50901dca62fc..6f81a3ac8de5 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs @@ -21,7 +21,7 @@ use crate::vm_latest::{ memory::SimpleMemory, utils::{aux_heap_page_from_base, heap_page_from_base}, }, - vm::MultiVMSubversion, + vm::MultiVmSubversion, }; #[derive(Clone, Debug, Copy)] @@ -47,7 +47,7 @@ impl VmHook { pub(crate) fn from_opcode_memory( state: &VmLocalStateData<'_>, data: &BeforeExecutionData, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { let opcode_variant = data.opcode.variant; let heap_page = @@ -89,7 +89,7 @@ impl VmHook { pub(crate) fn get_debug_log( state: &VmLocalStateData<'_>, memory: &SimpleMemory, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> String { let vm_hook_params: Vec<_> = get_vm_hook_params(memory, subversion) .into_iter() @@ -161,7 +161,7 @@ pub(crate) fn print_debug_if_needed( state: &VmLocalStateData<'_>, memory: &SimpleMemory, latest_returndata_ptr: Option, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) { let log = match hook { VmHook::DebugLog => get_debug_log(state, memory, subversion), @@ -210,7 +210,7 @@ pub(crate) fn get_calldata_page_via_abi(far_call_abi: &FarCallABI, base_page: Me } pub(crate) fn get_vm_hook_params( memory: &SimpleMemory, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Vec { let start_position = get_vm_hook_params_start_position(subversion); memory.dump_page_content_as_u256_words( diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index 5a0e77023a5e..ada20af9fa3c 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -35,7 +35,7 @@ use crate::{ /// version was released with increased bootloader memory. The version with the small bootloader memory /// is available only on internal staging environments. #[derive(Debug, Copy, Clone)] -pub(crate) enum MultiVMSubversion { +pub(crate) enum MultiVmSubversion { /// The initial version of v1.5.0, available only on staging environments. 
SmallBootloaderMemory, /// The final correct version of v1.5.0 @@ -44,7 +44,7 @@ pub(crate) enum MultiVMSubversion { Gateway, } -impl MultiVMSubversion { +impl MultiVmSubversion { #[cfg(test)] pub(crate) fn latest() -> Self { Self::IncreasedBootloaderMemory @@ -53,7 +53,7 @@ impl MultiVMSubversion { #[derive(Debug)] pub(crate) struct VmVersionIsNotVm150Error; -impl TryFrom for MultiVMSubversion { +impl TryFrom for MultiVmSubversion { type Error = VmVersionIsNotVm150Error; fn try_from(value: VmVersion) -> Result { match value { @@ -77,7 +77,7 @@ pub struct Vm { pub(crate) batch_env: L1BatchEnv, // Snapshots for the current run pub(crate) snapshots: Vec, - pub(crate) subversion: MultiVMSubversion, + pub(crate) subversion: MultiVmSubversion, _phantom: std::marker::PhantomData, } @@ -247,7 +247,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { let (state, bootloader_state) = new_vm_state(storage.clone(), &system_env, &batch_env); Self { diff --git a/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs b/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs index 32930f31cd71..f430ad346387 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs @@ -10,7 +10,7 @@ use crate::vm_m5::{ storage::StorageOracle, }, storage::{Storage, StoragePtr}, - vm_instance::MultiVMSubversion, + vm_instance::MultiVmSubversion, }; #[derive(Debug)] @@ -25,7 +25,7 @@ pub struct OracleTools { } impl OracleTools { - pub fn new(storage_pointer: StoragePtr, refund_state: MultiVMSubversion) -> Self { + pub fn new(storage_pointer: StoragePtr, refund_state: MultiVmSubversion) -> Self { Self { storage: StorageOracle::new(storage_pointer.clone(), refund_state), memory: SimpleMemory::default(), diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs index ab373e9e7696..90bd9cfaab69 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs @@ -18,7 +18,7 @@ use crate::vm_m5::{ }, storage::{Storage, StoragePtr}, utils::StorageLogQuery, - vm_instance::MultiVMSubversion, + vm_instance::MultiVmSubversion, }; // While the storage does not support different shards, it was decided to write the @@ -45,7 +45,7 @@ pub struct StorageOracle { // to cover this slot. 
pub paid_changes: HistoryRecorder>, - pub refund_state: MultiVMSubversion, + pub refund_state: MultiVmSubversion, } impl OracleWithHistory for StorageOracle { @@ -63,7 +63,7 @@ impl OracleWithHistory for StorageOracle { } impl StorageOracle { - pub fn new(storage: StoragePtr, refund_state: MultiVMSubversion) -> Self { + pub fn new(storage: StoragePtr, refund_state: MultiVmSubversion) -> Self { Self { storage: HistoryRecorder::from_inner(StorageWrapper::new(storage)), frames_stack: Default::default(), @@ -74,10 +74,10 @@ impl StorageOracle { fn is_storage_key_free(&self, key: &StorageKey) -> bool { match self.refund_state { - MultiVMSubversion::V1 => { + MultiVmSubversion::V1 => { key.address() == &zksync_system_constants::SYSTEM_CONTEXT_ADDRESS } - MultiVMSubversion::V2 => { + MultiVmSubversion::V2 => { key.address() == &zksync_system_constants::SYSTEM_CONTEXT_ADDRESS || *key == storage_key_for_eth_balance(&BOOTLOADER_ADDRESS) } diff --git a/core/lib/multivm/src/versions/vm_m5/vm.rs b/core/lib/multivm/src/versions/vm_m5/vm.rs index 266a0a437e5e..bd104b868401 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm.rs @@ -12,7 +12,7 @@ use crate::{ }, vm_m5::{ storage::Storage, - vm_instance::{MultiVMSubversion, VmInstance}, + vm_instance::{MultiVmSubversion, VmInstance}, }, }; @@ -28,7 +28,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - vm_sub_version: MultiVMSubversion, + vm_sub_version: MultiVmSubversion, ) -> Self { let oracle_tools = crate::vm_m5::OracleTools::new(storage.clone(), vm_sub_version); let block_properties = zk_evm_1_3_1::block_properties::BlockProperties { @@ -127,8 +127,8 @@ impl VmFactory for Vm { fn new(batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr) -> Self { let vm_version: VmVersion = system_env.version.into(); let vm_sub_version = match vm_version { - VmVersion::M5WithoutRefunds => MultiVMSubversion::V1, - VmVersion::M5WithRefunds => MultiVMSubversion::V2, + VmVersion::M5WithoutRefunds => MultiVmSubversion::V1, + VmVersion::M5WithRefunds => MultiVmSubversion::V2, _ => panic!("Unsupported protocol version for vm_m5: {:?}", vm_version), }; Self::new_with_subversion(batch_env, system_env, storage, vm_sub_version) diff --git a/core/lib/multivm/src/versions/vm_m5/vm_instance.rs b/core/lib/multivm/src/versions/vm_m5/vm_instance.rs index 4a96c4a750cc..94b86bce7ea7 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm_instance.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm_instance.rs @@ -81,7 +81,7 @@ pub(crate) fn get_vm_hook_params(memory: &SimpleMemory) -> Vec { /// /// This enum allows to execute blocks with the same VM but different support for refunds. #[derive(Debug, Copy, Clone)] -pub enum MultiVMSubversion { +pub enum MultiVmSubversion { /// Initial VM M5 version, refunds are fully disabled. V1, /// Refunds were enabled. ETH balance for bootloader address was marked as a free slot. @@ -99,7 +99,7 @@ pub struct VmInstance { pub snapshots: Vec, /// MultiVM-specific addition. See enum doc-comment for details. - pub(crate) refund_state: MultiVMSubversion, + pub(crate) refund_state: MultiVmSubversion, } /// This structure stores data that accumulates during the VM run. 
@@ -560,12 +560,12 @@ impl VmInstance { let refund_to_propose; let refund_slot; match self.refund_state { - MultiVMSubversion::V1 => { + MultiVmSubversion::V1 => { refund_to_propose = bootloader_refund; refund_slot = OPERATOR_REFUNDS_OFFSET + self.bootloader_state.tx_to_execute() - 1; } - MultiVMSubversion::V2 => { + MultiVmSubversion::V2 => { let gas_spent_on_pubdata = tracer .gas_spent_on_pubdata(&self.state.local_state) - spent_pubdata_counter_before; diff --git a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs index 653169cd7ff0..706c0fbc717c 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs @@ -30,7 +30,7 @@ use crate::{ utils::{ code_page_candidate_from_base, heap_page_from_base, BLOCK_GAS_LIMIT, INITIAL_BASE_PAGE, }, - vm_instance::{MultiVMSubversion, VmInstance, ZkSyncVmState}, + vm_instance::{MultiVmSubversion, VmInstance, ZkSyncVmState}, OracleTools, }, }; @@ -222,7 +222,7 @@ impl Default for TxExecutionMode { } pub fn init_vm( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -241,7 +241,7 @@ pub fn init_vm( } pub fn init_vm_with_gas_limit( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -338,7 +338,7 @@ impl BlockContextMode { // This method accepts a custom bootloader code. // It should be used only in tests. pub fn init_vm_inner( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, mut oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, diff --git a/core/lib/multivm/src/versions/vm_m6/vm.rs b/core/lib/multivm/src/versions/vm_m6/vm.rs index 0443dc8fb55e..2ed2666b2208 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm.rs @@ -14,7 +14,7 @@ use crate::{ }, tracers::old::TracerDispatcher, utils::bytecode, - vm_m6::{storage::Storage, vm_instance::MultiVMSubversion, VmInstance}, + vm_m6::{storage::Storage, vm_instance::MultiVmSubversion, VmInstance}, }; #[derive(Debug)] @@ -28,7 +28,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - vm_sub_version: MultiVMSubversion, + vm_sub_version: MultiVmSubversion, ) -> Self { let oracle_tools = crate::vm_m6::OracleTools::new(storage.clone(), H::VmM6Mode::default()); let block_properties = zk_evm_1_3_1::block_properties::BlockProperties { @@ -220,8 +220,8 @@ impl VmFactory for Vm { fn new(batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr) -> Self { let vm_version: VmVersion = system_env.version.into(); let vm_sub_version = match vm_version { - VmVersion::M6Initial => MultiVMSubversion::V1, - VmVersion::M6BugWithCompressionFixed => MultiVMSubversion::V2, + VmVersion::M6Initial => MultiVmSubversion::V1, + VmVersion::M6BugWithCompressionFixed => MultiVmSubversion::V2, _ => panic!("Unsupported protocol version for vm_m6: {:?}", vm_version), }; Self::new_with_subversion(batch_env, system_env, storage, vm_sub_version) diff --git a/core/lib/multivm/src/versions/vm_m6/vm_instance.rs b/core/lib/multivm/src/versions/vm_m6/vm_instance.rs index d6c418da4c20..29ef17aa4bc7 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm_instance.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm_instance.rs @@ -82,7 +82,7 @@ 
pub(crate) fn get_vm_hook_params(memory: &SimpleMemory) -> Ve /// /// This enum allows to execute blocks with the same VM but different support for refunds. #[derive(Debug, Copy, Clone)] -pub enum MultiVMSubversion { +pub enum MultiVmSubversion { /// Initial VM M6 version. V1, /// Bug with code compression was fixed. @@ -98,7 +98,7 @@ pub struct VmInstance { pub(crate) bootloader_state: BootloaderState, pub snapshots: Vec, - pub vm_subversion: MultiVMSubversion, + pub vm_subversion: MultiVmSubversion, } /// This structure stores data that accumulates during the VM run. diff --git a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs index a47ffb116364..24cddd5eb5ea 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs @@ -30,7 +30,7 @@ use crate::{ utils::{ code_page_candidate_from_base, heap_page_from_base, BLOCK_GAS_LIMIT, INITIAL_BASE_PAGE, }, - vm_instance::{MultiVMSubversion, ZkSyncVmState}, + vm_instance::{MultiVmSubversion, ZkSyncVmState}, OracleTools, VmInstance, }, }; @@ -270,7 +270,7 @@ impl Default for TxExecutionMode { } pub fn init_vm( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -289,7 +289,7 @@ pub fn init_vm( } pub fn init_vm_with_gas_limit( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -386,7 +386,7 @@ impl BlockContextMode { // This method accepts a custom bootloader code. // It should be used only in tests. pub fn init_vm_inner( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, mut oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -434,7 +434,7 @@ fn bootloader_initial_memory(block_properties: &BlockContextMode) -> Vec<(usize, } pub fn get_bootloader_memory( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, txs: Vec, predefined_refunds: Vec, predefined_compressed_bytecodes: Vec>, @@ -442,14 +442,14 @@ pub fn get_bootloader_memory( block_context: BlockContextMode, ) -> Vec<(usize, U256)> { match vm_subversion { - MultiVMSubversion::V1 => get_bootloader_memory_v1( + MultiVmSubversion::V1 => get_bootloader_memory_v1( txs, predefined_refunds, predefined_compressed_bytecodes, execution_mode, block_context, ), - MultiVMSubversion::V2 => get_bootloader_memory_v2( + MultiVmSubversion::V2 => get_bootloader_memory_v2( txs, predefined_refunds, predefined_compressed_bytecodes, @@ -576,14 +576,14 @@ pub fn push_raw_transaction_to_bootloader_memory( explicit_compressed_bytecodes: Option>, ) -> Vec { match vm.vm_subversion { - MultiVMSubversion::V1 => push_raw_transaction_to_bootloader_memory_v1( + MultiVmSubversion::V1 => push_raw_transaction_to_bootloader_memory_v1( vm, tx, execution_mode, predefined_overhead, explicit_compressed_bytecodes, ), - MultiVMSubversion::V2 => push_raw_transaction_to_bootloader_memory_v2( + MultiVmSubversion::V2 => push_raw_transaction_to_bootloader_memory_v2( vm, tx, execution_mode, diff --git a/core/lib/multivm/src/vm_instance.rs b/core/lib/multivm/src/vm_instance.rs index e2f72bd24113..9de99a7eb116 100644 --- a/core/lib/multivm/src/vm_instance.rs +++ b/core/lib/multivm/src/vm_instance.rs @@ -14,6 +14,7 @@ use crate::{ VmMemoryMetrics, }, tracers::TracerDispatcher, + 
vm_fast::FastVmVersion, vm_latest::HistoryEnabled, }; @@ -132,7 +133,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m5::vm_instance::MultiVMSubversion::V1, + crate::vm_m5::vm_instance::MultiVmSubversion::V1, ); Self::VmM5(vm) } @@ -141,7 +142,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m5::vm_instance::MultiVMSubversion::V2, + crate::vm_m5::vm_instance::MultiVmSubversion::V2, ); Self::VmM5(vm) } @@ -150,7 +151,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m6::vm_instance::MultiVMSubversion::V1, + crate::vm_m6::vm_instance::MultiVmSubversion::V1, ); Self::VmM6(vm) } @@ -159,7 +160,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m6::vm_instance::MultiVMSubversion::V2, + crate::vm_m6::vm_instance::MultiVmSubversion::V2, ); Self::VmM6(vm) } @@ -194,7 +195,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ); Self::Vm1_5_0(vm) } @@ -203,7 +204,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ); Self::Vm1_5_0(vm) } @@ -212,7 +213,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ); Self::Vm1_5_0(vm) } @@ -340,8 +341,5 @@ impl FastVmInstance { /// Checks whether the protocol version is supported by the fast VM. pub fn is_supported_by_fast_vm(protocol_version: ProtocolVersionId) -> bool { - matches!( - protocol_version.into(), - VmVersion::Vm1_5_0IncreasedBootloaderMemory - ) + FastVmVersion::try_from(VmVersion::from(protocol_version)).is_ok() } diff --git a/core/lib/vm_executor/src/batch/factory.rs b/core/lib/vm_executor/src/batch/factory.rs index de0db5f0bf75..76ef244401bd 100644 --- a/core/lib/vm_executor/src/batch/factory.rs +++ b/core/lib/vm_executor/src/batch/factory.rs @@ -18,7 +18,7 @@ use zksync_multivm::{ tracers::CallTracer, vm_fast, vm_latest::HistoryEnabled, - FastVmInstance, LegacyVmInstance, MultiVMTracer, + FastVmInstance, LegacyVmInstance, MultiVmTracer, }; use zksync_types::{commitment::PubdataParams, vm::FastVmMode, Transaction}; diff --git a/core/lib/vm_executor/src/oneshot/contracts.rs b/core/lib/vm_executor/src/oneshot/contracts.rs index d4e0a94f9178..cacab36cb1c2 100644 --- a/core/lib/vm_executor/src/oneshot/contracts.rs +++ b/core/lib/vm_executor/src/oneshot/contracts.rs @@ -26,7 +26,7 @@ impl ContractsKind for CallOrExecute {} /// Provider of [`BaseSystemContracts`] for oneshot execution. /// -/// The main implementation of this trait is [`MultiVMBaseSystemContracts`], which selects contracts +/// The main implementation of this trait is [`MultiVmBaseSystemContracts`], which selects contracts /// based on [`ProtocolVersionId`]. #[async_trait] pub trait BaseSystemContractsProvider: fmt::Debug + Send + Sync { @@ -46,7 +46,7 @@ pub trait BaseSystemContractsProvider: fmt::Debug + Send + Syn /// System contracts (bootloader and default account abstraction) for all supported VM versions. #[derive(Debug)] -pub struct MultiVMBaseSystemContracts { +pub struct MultiVmBaseSystemContracts { /// Contracts to be used for pre-virtual-blocks protocol versions. 
pre_virtual_blocks: BaseSystemContracts, /// Contracts to be used for post-virtual-blocks protocol versions. @@ -69,11 +69,11 @@ pub struct MultiVMBaseSystemContracts { vm_protocol_defense: BaseSystemContracts, /// Contracts to be used after the gateway upgrade gateway: BaseSystemContracts, - // We use `fn() -> C` marker so that the `MultiVMBaseSystemContracts` unconditionally implements `Send + Sync`. + // We use `fn() -> C` marker so that the `MultiVmBaseSystemContracts` unconditionally implements `Send + Sync`. _contracts_kind: PhantomData C>, } -impl MultiVMBaseSystemContracts { +impl MultiVmBaseSystemContracts { fn get_by_protocol_version( &self, version: ProtocolVersionId, @@ -120,7 +120,7 @@ impl MultiVMBaseSystemContracts { } } -impl MultiVMBaseSystemContracts { +impl MultiVmBaseSystemContracts { /// Returned system contracts (mainly the bootloader) are tuned to provide accurate execution metrics. pub fn load_estimate_gas_blocking() -> Self { Self { @@ -142,7 +142,7 @@ impl MultiVMBaseSystemContracts { } } -impl MultiVMBaseSystemContracts { +impl MultiVmBaseSystemContracts { /// Returned system contracts (mainly the bootloader) are tuned to provide better UX (e.g. revert messages). pub fn load_eth_call_blocking() -> Self { Self { @@ -165,7 +165,7 @@ impl MultiVMBaseSystemContracts { } #[async_trait] -impl BaseSystemContractsProvider for MultiVMBaseSystemContracts { +impl BaseSystemContractsProvider for MultiVmBaseSystemContracts { async fn base_system_contracts( &self, block_info: &ResolvedBlockInfo, diff --git a/core/lib/vm_executor/src/oneshot/mod.rs b/core/lib/vm_executor/src/oneshot/mod.rs index e95164c0fc87..0dfdb67bff52 100644 --- a/core/lib/vm_executor/src/oneshot/mod.rs +++ b/core/lib/vm_executor/src/oneshot/mod.rs @@ -29,7 +29,7 @@ use zksync_multivm::{ utils::adjust_pubdata_price_for_tx, vm_latest::{HistoryDisabled, HistoryEnabled}, zk_evm_latest::ethereum_types::U256, - FastVmInstance, HistoryMode, LegacyVmInstance, MultiVMTracer, + FastVmInstance, HistoryMode, LegacyVmInstance, MultiVmTracer, }; use zksync_types::{ block::pack_block_info, @@ -46,7 +46,7 @@ pub use self::{ block::{BlockInfo, ResolvedBlockInfo}, contracts::{ BaseSystemContractsProvider, CallOrExecute, ContractsKind, EstimateGas, - MultiVMBaseSystemContracts, + MultiVmBaseSystemContracts, }, env::OneshotEnvParameters, mock::MockOneshotExecutor, diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 180b53492839..91fb84ab8f17 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -33,7 +33,7 @@ use zksync_types::{ MAX_NEW_FACTORY_DEPS, U256, }; use zksync_vm_executor::oneshot::{ - CallOrExecute, EstimateGas, MultiVMBaseSystemContracts, OneshotEnvParameters, + CallOrExecute, EstimateGas, MultiVmBaseSystemContracts, OneshotEnvParameters, }; pub(super) use self::{gas_estimation::BinarySearchKind, result::SubmitTxError}; @@ -109,11 +109,11 @@ impl SandboxExecutorOptions { validation_computational_gas_limit: u32, ) -> anyhow::Result { let estimate_gas_contracts = - tokio::task::spawn_blocking(MultiVMBaseSystemContracts::load_estimate_gas_blocking) + tokio::task::spawn_blocking(MultiVmBaseSystemContracts::load_estimate_gas_blocking) .await .context("failed loading base contracts for gas estimation")?; let call_contracts = - tokio::task::spawn_blocking(MultiVMBaseSystemContracts::load_eth_call_blocking) + tokio::task::spawn_blocking(MultiVmBaseSystemContracts::load_eth_call_blocking) .await .context("failed loading 
base contracts for calls / tx execution")?; diff --git a/core/node/consensus/src/vm.rs b/core/node/consensus/src/vm.rs index cbd4918dcee1..81d26ebc3758 100644 --- a/core/node/consensus/src/vm.rs +++ b/core/node/consensus/src/vm.rs @@ -8,7 +8,7 @@ use zksync_state::PostgresStorage; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ethabi, fee::Fee, l2::L2Tx, AccountTreeId, L2ChainId, Nonce, U256}; use zksync_vm_executor::oneshot::{ - CallOrExecute, MainOneshotExecutor, MultiVMBaseSystemContracts, OneshotEnvParameters, + CallOrExecute, MainOneshotExecutor, MultiVmBaseSystemContracts, OneshotEnvParameters, }; use zksync_vm_interface::{ executor::OneshotExecutor, storage::StorageWithOverrides, ExecutionResult, @@ -29,7 +29,7 @@ impl VM { /// Constructs a new `VM` instance. pub async fn new(pool: ConnectionPool) -> Self { let base_system_contracts = - scope::wait_blocking(MultiVMBaseSystemContracts::load_eth_call_blocking).await; + scope::wait_blocking(MultiVmBaseSystemContracts::load_eth_call_blocking).await; Self { pool, // L2 chain ID and fee account don't seem to matter for calls, hence the use of default values. diff --git a/core/node/genesis/src/utils.rs b/core/node/genesis/src/utils.rs index d89d7475e84b..d8076229fa54 100644 --- a/core/node/genesis/src/utils.rs +++ b/core/node/genesis/src/utils.rs @@ -5,7 +5,7 @@ use zksync_contracts::BaseSystemContracts; use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::{ circuit_sequencer_api_latest::sort_storage_access::sort_storage_access_queries, - zk_evm_latest::aux_structures::{LogQuery as MultiVmLogQuery, Timestamp as MultiVMTimestamp}, + zk_evm_latest::aux_structures::{LogQuery as MultiVmLogQuery, Timestamp as MultiVmTimestamp}, }; use zksync_system_constants::{DEFAULT_ERA_CHAIN_ID, ETHEREUM_ADDRESS}; use zksync_types::{ @@ -84,7 +84,7 @@ pub(super) fn get_deduped_log_queries(storage_logs: &[StorageLog]) -> Vec