From 3606fc1d8f103b4f7174301f9a985ace2b89038d Mon Sep 17 00:00:00 2001 From: Ryan Doyle Date: Fri, 8 Nov 2024 03:36:56 -0700 Subject: [PATCH 1/7] feat(vm_executor): Add new histogram metric for gas per tx in vm_executor (#3215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adds a new histogram metric representing total gas used per tx - Adds a new histogram metric representing the gas limit per failed tx ## Why ❔ - The existing metric is a histogram of rates. That's a little difficult to extract useful throughput numbers out of with the notable exception of efficiency. - Exporting this metric to let the consumer do their own: rate/anomaly calculation on gas/tx, ratio of gas burn for failed tx, etc. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
--- core/lib/vm_executor/src/batch/executor.rs | 9 ++++++--- core/lib/vm_executor/src/batch/metrics.rs | 10 ++++++++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/core/lib/vm_executor/src/batch/executor.rs b/core/lib/vm_executor/src/batch/executor.rs index 6dc9354fd7db..12b0718a4a56 100644 --- a/core/lib/vm_executor/src/batch/executor.rs +++ b/core/lib/vm_executor/src/batch/executor.rs @@ -99,11 +99,13 @@ where let elapsed = latency.observe(); if !res.tx_result.result.is_failed() { - let gas_per_nanosecond = - res.tx_result.statistics.computational_gas_used as f64 / elapsed.as_nanos() as f64; + let gas_used = res.tx_result.statistics.computational_gas_used; EXECUTOR_METRICS .computational_gas_per_nanosecond - .observe(gas_per_nanosecond); + .observe(gas_used as f64 / elapsed.as_nanos() as f64); + EXECUTOR_METRICS + .computational_gas_used + .observe(gas_used.into()); } else { // The amount of computational gas paid for failed transactions is hard to get // but comparing to the gas limit makes sense, since we can burn all gas @@ -111,6 +113,7 @@ where EXECUTOR_METRICS .failed_tx_gas_limit_per_nanosecond .observe(tx_gas_limit as f64 / elapsed.as_nanos() as f64); + EXECUTOR_METRICS.failed_tx_gas_limit.observe(tx_gas_limit); } Ok(res) } diff --git a/core/lib/vm_executor/src/batch/metrics.rs b/core/lib/vm_executor/src/batch/metrics.rs index 6851193e9be9..37f7997c31fd 100644 --- a/core/lib/vm_executor/src/batch/metrics.rs +++ b/core/lib/vm_executor/src/batch/metrics.rs @@ -21,6 +21,10 @@ const GAS_PER_NANOSECOND_BUCKETS: Buckets = Buckets::values(&[ 0.01, 0.03, 0.1, 0.3, 0.5, 0.75, 1., 1.5, 3., 5., 10., 20., 50., ]); +const GAS_USED_BUCKETS: Buckets = Buckets::values(&[ + 10000., 25000., 45000., 70000., 100000., 150000., 225000., 350000., 500000., +]); + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "stage", rename_all = "snake_case")] pub(super) enum TxExecutionStage { @@ -37,8 +41,14 @@ 
pub(super) struct ExecutorMetrics { pub batch_executor_command_response_time: Family>, #[metrics(buckets = GAS_PER_NANOSECOND_BUCKETS)] pub computational_gas_per_nanosecond: Histogram, + /// Computational gas used, per transaction. + #[metrics(buckets = GAS_USED_BUCKETS)] + pub computational_gas_used: Histogram, #[metrics(buckets = GAS_PER_NANOSECOND_BUCKETS)] pub failed_tx_gas_limit_per_nanosecond: Histogram, + /// Gas limit, per failed transaction. + #[metrics(buckets = GAS_USED_BUCKETS)] + pub failed_tx_gas_limit: Histogram, /// Cumulative latency of interacting with the storage when executing a transaction /// in the batch executor. #[metrics(buckets = Buckets::LATENCIES)] From e874fbc567e929a0fb24b624a594db06ffaee385 Mon Sep 17 00:00:00 2001 From: Aleksandr Stepanov Date: Fri, 8 Nov 2024 11:56:40 +0100 Subject: [PATCH 2/7] ci: Try to get setup_2\26 key from cache (#3235) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Try to get setup_2\26 key from cache, instead of downloading it from bucket. ## Why ❔ Improve speed for download. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
--- .github/workflows/build-tee-prover-template.yml | 1 - .github/workflows/ci-common-reusable.yml | 3 +-- .github/workflows/ci-prover-e2e.yml | 2 +- .github/workflows/new-build-contract-verifier-template.yml | 6 +++++- .github/workflows/new-build-core-template.yml | 6 +++++- .github/workflows/new-build-prover-template.yml | 1 - 6 files changed, 12 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-tee-prover-template.yml b/.github/workflows/build-tee-prover-template.yml index 0e5b80d2e3a2..c55e06931247 100644 --- a/.github/workflows/build-tee-prover-template.yml +++ b/.github/workflows/build-tee-prover-template.yml @@ -76,4 +76,3 @@ jobs: docker push "${repo}/${tag}" done done - diff --git a/.github/workflows/ci-common-reusable.yml b/.github/workflows/ci-common-reusable.yml index ea91fc4a7cd6..d57630d3029a 100644 --- a/.github/workflows/ci-common-reusable.yml +++ b/.github/workflows/ci-common-reusable.yml @@ -28,7 +28,7 @@ jobs: run: | run_retried docker-compose -f ${RUNNER_COMPOSE_FILE} pull docker-compose -f ${RUNNER_COMPOSE_FILE} up --build -d zk postgres - + - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup @@ -38,4 +38,3 @@ jobs: # `zk lint prover` = cargo clippy, which does cargo check behind the scenes, which is a lightweight version of cargo build - name: Lints run: ci_run zkstack dev lint -t rs --check - diff --git a/.github/workflows/ci-prover-e2e.yml b/.github/workflows/ci-prover-e2e.yml index b0b9caf888fc..6076874c3710 100644 --- a/.github/workflows/ci-prover-e2e.yml +++ b/.github/workflows/ci-prover-e2e.yml @@ -100,7 +100,7 @@ jobs: - name: Kill prover & start compressor run: | sudo ./bin/prover_checkers/kill_prover - + ci_run zkstack prover run --component=compressor --docker=false &>prover_logs/compressor.log & - name: Wait for batch to be executed on L1 env: diff --git a/.github/workflows/new-build-contract-verifier-template.yml 
b/.github/workflows/new-build-contract-verifier-template.yml index 9b23cda6f02a..7e48968a65c1 100644 --- a/.github/workflows/new-build-contract-verifier-template.yml +++ b/.github/workflows/new-build-contract-verifier-template.yml @@ -176,7 +176,11 @@ jobs: - name: Download setup key shell: bash run: | - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi - name: Set env vars shell: bash diff --git a/.github/workflows/new-build-core-template.yml b/.github/workflows/new-build-core-template.yml index c4aeb9180fda..350d689c4572 100644 --- a/.github/workflows/new-build-core-template.yml +++ b/.github/workflows/new-build-core-template.yml @@ -187,7 +187,11 @@ jobs: - name: Download setup key shell: bash run: | - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi - name: Set env vars shell: bash diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml index 5d42696c0b2a..046711d679e8 100644 --- a/.github/workflows/new-build-prover-template.yml +++ b/.github/workflows/new-build-prover-template.yml @@ -127,7 +127,6 @@ jobs: if: matrix.components == 'proof-fri-gpu-compressor' run: | run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available # In our case it happens only when PR is created from fork - name: Wait for runner IP to be not rate-limited against GH API From 5bc70c85b1d752f69e75457729ccd07013e62e63 Mon Sep 
17 00:00:00 2001 From: Aleksandr Stepanov Date: Fri, 8 Nov 2024 13:25:44 +0100 Subject: [PATCH 3/7] ci: Change schedule to monthly and add grouping to renovate (#3239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Change renovatebot pr schedule to monthly and grouping changes in one PR. ## Why ❔ Do not have a lot of PR's and save time to merge them and test. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- renovate.json | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/renovate.json b/renovate.json index eeccfee848dc..fd09d70ffe4b 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,13 @@ { - "extends": ["config:base", "schedule:earlyMondays","helpers:pinGitHubActionDigests"], + "extends": ["config:base", "helpers:pinGitHubActionDigests"], "enabledManagers": ["github-actions"], - "prCreation": "immediate" + "prCreation": "not-pending", + "groupName": "github actions monthly updates", + "schedule": ["monthly"], + "packageRules": [ + { + "managers": ["github-actions"], + "groupName": "all-github-actions-updates" + } + ] } From 25ec1c836eaba425c83091c48e728a77498b4e75 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 8 Nov 2024 12:26:34 +0000 Subject: [PATCH 4/7] chore(deps): update trufflesecurity/trufflehog digest to 781157a (#2881) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | trufflesecurity/trufflehog | action | digest | `0c66d30` -> `781157a` | --- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - 
At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/matter-labs/zksync-era). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/secrets_scanner.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/secrets_scanner.yaml b/.github/workflows/secrets_scanner.yaml index fa896bf10561..9bb1ad0a2722 100644 --- a/.github/workflows/secrets_scanner.yaml +++ b/.github/workflows/secrets_scanner.yaml @@ -11,7 +11,7 @@ jobs: with: fetch-depth: 0 - name: TruffleHog OSS - uses: trufflesecurity/trufflehog@0c66d30c1f4075cee1aada2e1ab46dabb1b0071a + uses: trufflesecurity/trufflehog@781157ae368b2218a0a56b889387dd26faa20f97 with: path: ./ base: ${{ github.event.repository.default_branch }} From fd27507168de97b734f7cfd330e53b455a527617 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Fri, 8 Nov 2024 14:46:08 +0200 Subject: [PATCH 5/7] refactor(vm): Narrow down factory deps returned in execution result (#3220) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Returns just dynamic factory deps (= EVM bytecodes deployed in a transaction) instead of all deployed bytecodes. - Tests this functionality in `multivm` and `state_keeper`. ## Why ❔ Returning all factory deps is redundant and makes thing awkward for old VM versions that don't support EVM emulation. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. 
- [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. --- Cargo.lock | 1 + core/lib/multivm/Cargo.toml | 1 + .../src/glue/types/vm/vm_block_result.rs | 14 +- .../types/vm/vm_partial_execution_result.rs | 8 +- .../glue/types/vm/vm_tx_execution_result.rs | 12 +- core/lib/multivm/src/versions/shadow/tests.rs | 17 +- .../src/versions/testonly/evm_emulator.rs | 180 +++++++++++++++- .../vm_1_4_1/implementation/execution.rs | 4 +- .../vm_1_4_2/implementation/execution.rs | 4 +- .../implementation/execution.rs | 4 +- .../src/versions/vm_fast/evm_deploy_tracer.rs | 4 +- .../versions/vm_fast/tests/evm_emulator.rs | 20 +- core/lib/multivm/src/versions/vm_fast/vm.rs | 55 +++-- .../vm_latest/implementation/execution.rs | 8 +- .../vm_latest/old_vm/oracles/decommitter.rs | 20 +- .../versions/vm_latest/tests/evm_emulator.rs | 20 +- .../vm_latest/tracers/evm_deploy_tracer.rs | 11 +- .../vm_latest/types/internals/vm_state.rs | 28 +-- .../src/versions/vm_latest/utils/mod.rs | 24 --- core/lib/multivm/src/versions/vm_latest/vm.rs | 24 ++- .../implementation/execution.rs | 4 +- .../implementation/execution.rs | 4 +- core/lib/vm_executor/src/oneshot/mock.rs | 8 +- .../src/types/outputs/execution_result.rs | 50 ++++- .../src/types/outputs/finished_l1batch.rs | 9 +- core/lib/vm_interface/src/utils/shadow.rs | 16 +- core/node/api_server/src/web3/tests/vm.rs | 5 +- core/node/state_keeper/src/io/persistence.rs | 3 +- core/node/state_keeper/src/io/tests/mod.rs | 204 ++++++++++++------ core/node/state_keeper/src/keeper.rs | 9 +- .../state_keeper/src/seal_criteria/mod.rs | 3 - core/node/state_keeper/src/testonly/mod.rs | 12 +- .../src/testonly/test_batch_executor.rs | 15 +- core/node/state_keeper/src/tests/mod.rs | 20 +- .../src/updates/l1_batch_updates.rs | 3 - .../src/updates/l2_block_updates.rs | 22 +- core/node/state_keeper/src/updates/mod.rs | 7 +- .../contracts/mock-evm/mock-evm.sol | 100 ++++++++- yarn.lock | 
181 +++++++++++++++- 39 files changed, 843 insertions(+), 291 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c5cee452b1ae..65ae365e3a2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11418,6 +11418,7 @@ dependencies = [ "itertools 0.10.5", "once_cell", "pretty_assertions", + "rand 0.8.5", "test-casing", "thiserror", "tracing", diff --git a/core/lib/multivm/Cargo.toml b/core/lib/multivm/Cargo.toml index eb770bf9b57e..27130bc2720d 100644 --- a/core/lib/multivm/Cargo.toml +++ b/core/lib/multivm/Cargo.toml @@ -43,6 +43,7 @@ ethabi.workspace = true [dev-dependencies] assert_matches.workspace = true pretty_assertions.workspace = true +rand.workspace = true test-casing.workspace = true zksync_test_account.workspace = true zksync_eth_signer.workspace = true diff --git a/core/lib/multivm/src/glue/types/vm/vm_block_result.rs b/core/lib/multivm/src/glue/types/vm/vm_block_result.rs index 50bb19938fe7..c4eb0b1741aa 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_block_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_block_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use circuit_sequencer_api_1_3_3::sort_storage_access::sort_storage_access_queries as sort_storage_access_queries_1_3_3; use itertools::Itertools; use zk_evm_1_3_1::aux_structures::LogQuery as LogQuery_1_3_1; @@ -47,7 +49,7 @@ impl GlueFrom for crate::interface::Fi circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -104,7 +106,7 @@ impl GlueFrom for crate::interface::Fi circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -160,7 +162,7 @@ impl GlueFrom for crate::interface: circuit_statistic: Default::default(), }, refunds: 
Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, final_execution_state: CurrentExecutionState { events: value.full_result.events, @@ -230,7 +232,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -263,7 +265,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -312,7 +314,7 @@ impl GlueFrom circuit_statistic: Default::default(), }, refunds: Refunds::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } diff --git a/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs b/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs index 4c4cffcc6876..fa251116b85c 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_partial_execution_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use crate::glue::{GlueFrom, GlueInto}; impl GlueFrom @@ -22,7 +24,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -49,7 +51,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } @@ -76,7 +78,7 @@ impl GlueFrom gas_refunded: 0, operator_suggested_refund: 0, }, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), } } } diff --git a/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs b/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs index 8978d4348edd..fcbcde990f37 100644 --- a/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs +++ b/core/lib/multivm/src/glue/types/vm/vm_tx_execution_result.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + 
use crate::{ glue::{GlueFrom, GlueInto}, interface::{ @@ -66,14 +68,14 @@ impl GlueFrom VmExecutionResultAndLogs { result: ExecutionResult::Halt { reason: halt }, logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, } } @@ -102,14 +104,14 @@ impl logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, TxRevertReason::Halt(halt) => VmExecutionResultAndLogs { result: ExecutionResult::Halt { reason: halt }, logs: Default::default(), statistics: Default::default(), refunds: Default::default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), }, } } @@ -133,7 +135,7 @@ impl GlueFrom { unreachable!("Halt is the only revert reason for VM 5") diff --git a/core/lib/multivm/src/versions/shadow/tests.rs b/core/lib/multivm/src/versions/shadow/tests.rs index e6fb05e24069..4466d96a96b7 100644 --- a/core/lib/multivm/src/versions/shadow/tests.rs +++ b/core/lib/multivm/src/versions/shadow/tests.rs @@ -231,7 +231,22 @@ mod evm_emulator { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::(); + test_mock_emulator_with_deployment::(false); + } + + #[test] + fn mock_emulator_with_reverted_deployment() { + test_mock_emulator_with_deployment::(true); + } + + #[test] + fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::(); + } + + #[test] + fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::(); } #[test] diff --git a/core/lib/multivm/src/versions/testonly/evm_emulator.rs b/core/lib/multivm/src/versions/testonly/evm_emulator.rs index 6de394842aaa..a77274ec581c 100644 --- a/core/lib/multivm/src/versions/testonly/evm_emulator.rs +++ b/core/lib/multivm/src/versions/testonly/evm_emulator.rs @@ -1,6 +1,8 @@ use std::collections::HashMap; +use 
assert_matches::assert_matches; use ethabi::Token; +use rand::{rngs::StdRng, Rng, SeedableRng}; use zksync_contracts::{load_contract, read_bytecode, SystemContractCode}; use zksync_system_constants::{ CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L2_BASE_TOKEN_ADDRESS, @@ -18,7 +20,8 @@ use zksync_utils::{ use super::{default_system_env, TestedVm, VmTester, VmTesterBuilder}; use crate::interface::{ - storage::InMemoryStorage, TxExecutionMode, VmExecutionResultAndLogs, VmInterfaceExt, + storage::InMemoryStorage, ExecutionResult, TxExecutionMode, VmExecutionResultAndLogs, + VmInterfaceExt, }; const MOCK_DEPLOYER_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/MockContractDeployer.json"; @@ -146,11 +149,26 @@ pub(crate) fn test_tracing_evm_contract_deployment() { .execute_transaction_with_bytecode_compression(deploy_tx, true); assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); - let new_known_factory_deps = vm_result.new_known_factory_deps.unwrap(); - assert_eq!(new_known_factory_deps.len(), 2); // the deployed EraVM contract + EVM contract + // The EraVM contract also deployed in a transaction should be filtered out assert_eq!( - new_known_factory_deps[&expected_bytecode_hash], - evm_bytecode + vm_result.dynamic_factory_deps, + HashMap::from([(expected_bytecode_hash, evm_bytecode)]) + ); + + // "Deploy" a bytecode in another transaction and check that the first tx doesn't interfere with the returned `dynamic_factory_deps`. 
+ let args = [Token::Bytes((0..32).rev().collect())]; + let evm_bytecode = ethabi::encode(&args); + let expected_bytecode_hash = hash_evm_bytecode(&evm_bytecode); + let execute = Execute::for_deploy(expected_bytecode_hash, vec![0; 32], &args); + let deploy_tx = account.get_l2_tx_for_execute(execute, None); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(deploy_tx, true); + assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); + + assert_eq!( + vm_result.dynamic_factory_deps, + HashMap::from([(expected_bytecode_hash, evm_bytecode)]) ); } @@ -310,7 +328,7 @@ pub(crate) fn test_calling_to_mock_emulator_from_native_contract() assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); } -pub(crate) fn test_mock_emulator_with_deployment() { +pub(crate) fn test_mock_emulator_with_deployment(revert: bool) { let contract_address = Address::repeat_byte(0xaa); let mut vm = EvmTestBuilder::new(true, contract_address) .with_mock_deployer() @@ -329,6 +347,7 @@ pub(crate) fn test_mock_emulator_with_deployment() { .encode_input(&[ Token::FixedBytes(new_evm_bytecode_hash.0.into()), Token::Bytes(new_evm_bytecode.clone()), + Token::Bool(revert), ]) .unwrap(), value: 0.into(), @@ -336,16 +355,159 @@ pub(crate) fn test_mock_emulator_with_deployment() { }, None, ); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + + assert_eq!(vm_result.result.is_failed(), revert, "{vm_result:?}"); + let expected_dynamic_deps = if revert { + HashMap::new() + } else { + HashMap::from([(new_evm_bytecode_hash, new_evm_bytecode)]) + }; + assert_eq!(vm_result.dynamic_factory_deps, expected_dynamic_deps); + + // Test that a following transaction can decommit / call EVM contracts deployed in the previous transaction. 
+ let test_fn = mock_emulator_abi + .function("testCallToPreviousDeployment") + .unwrap(); + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn.encode_input(&[]).unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + + if revert { + assert_matches!( + &vm_result.result, + ExecutionResult::Revert { output } + if output.to_string().contains("contract code length") + ); + } else { + assert!(!vm_result.result.is_failed(), "{vm_result:?}"); + } + assert!(vm_result.dynamic_factory_deps.is_empty(), "{vm_result:?}"); +} + +fn encode_deployment(hash: H256, bytecode: Vec) -> Token { + assert_eq!(bytecode.len(), 32); + Token::Tuple(vec![ + Token::FixedBytes(hash.0.to_vec()), + Token::FixedBytes(bytecode), + ]) +} + +pub(crate) fn test_mock_emulator_with_recursive_deployment() { + let contract_address = Address::repeat_byte(0xaa); + let mut vm = EvmTestBuilder::new(true, contract_address) + .with_mock_deployer() + .build::(); + let account = &mut vm.rich_accounts[0]; + + let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let bytecodes: HashMap<_, _> = (0_u8..10) + .map(|byte| { + let bytecode = vec![byte; 32]; + (hash_evm_bytecode(&bytecode), bytecode) + }) + .collect(); + let test_fn = mock_emulator_abi + .function("testRecursiveDeployment") + .unwrap(); + let deployments: Vec<_> = bytecodes + .iter() + .map(|(hash, code)| encode_deployment(*hash, code.clone())) + .collect(); + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn.encode_input(&[Token::Array(deployments)]).unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + + let (_, vm_result) = vm + .vm + .execute_transaction_with_bytecode_compression(test_tx, true); + assert!(!vm_result.result.is_failed(), "{vm_result:?}"); + 
assert_eq!(vm_result.dynamic_factory_deps, bytecodes); +} + +pub(crate) fn test_mock_emulator_with_partial_reverts() { + for seed in [1, 10, 100, 1_000] { + println!("Testing with RNG seed {seed}"); + let mut rng = StdRng::seed_from_u64(seed); + test_mock_emulator_with_partial_reverts_and_rng::(&mut rng); + } +} + +fn test_mock_emulator_with_partial_reverts_and_rng(rng: &mut impl Rng) { + let contract_address = Address::repeat_byte(0xaa); + let mut vm = EvmTestBuilder::new(true, contract_address) + .with_mock_deployer() + .build::(); + let account = &mut vm.rich_accounts[0]; + + let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let all_bytecodes: HashMap<_, _> = (0_u8..10) + .map(|_| { + let bytecode = vec![rng.gen(); 32]; + (hash_evm_bytecode(&bytecode), bytecode) + }) + .collect(); + let should_revert: Vec<_> = (0..10).map(|_| rng.gen::()).collect(); + + let test_fn = mock_emulator_abi + .function("testDeploymentWithPartialRevert") + .unwrap(); + let deployments: Vec<_> = all_bytecodes + .iter() + .map(|(hash, code)| encode_deployment(*hash, code.clone())) + .collect(); + let revert_tokens: Vec<_> = should_revert.iter().copied().map(Token::Bool).collect(); + + let test_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: test_fn + .encode_input(&[Token::Array(deployments), Token::Array(revert_tokens)]) + .unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + let (_, vm_result) = vm .vm .execute_transaction_with_bytecode_compression(test_tx, true); assert!(!vm_result.result.is_failed(), "{vm_result:?}"); - let factory_deps = vm_result.new_known_factory_deps.unwrap(); + let dynamic_deps = &vm_result.dynamic_factory_deps; assert_eq!( - factory_deps, - HashMap::from([(new_evm_bytecode_hash, new_evm_bytecode)]) + dynamic_deps.len(), + should_revert + .iter() + .map(|flag| !flag as usize) + .sum::(), + "{dynamic_deps:?}" ); + for ((bytecode_hash, bytecode), &should_revert) in 
all_bytecodes.iter().zip(&should_revert) { + assert_eq!( + dynamic_deps.get(bytecode_hash), + (!should_revert).then_some(bytecode), + "hash={bytecode_hash:?}, deps={dynamic_deps:?}" + ); + } } pub(crate) fn test_mock_emulator_with_delegate_call() { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs b/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs index cc199fef9416..35ff73071ca6 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_4_1::aux_structures::Timestamp; @@ -99,7 +99,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs b/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs index f6e49cd8b149..341584168be4 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/implementation/execution.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use zk_evm_1_4_1::aux_structures::Timestamp; use crate::{ @@ -96,7 +98,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs index b8b939f86731..e942f0fc4245 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; 
use zk_evm_1_4_0::aux_structures::Timestamp; @@ -93,7 +93,7 @@ impl Vm { logs, statistics, refunds, - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs index 1bc0ff5134f1..62aba8df5b9b 100644 --- a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs @@ -16,8 +16,8 @@ use super::utils::read_fat_pointer; pub(super) struct DynamicBytecodes(Rc>>>); impl DynamicBytecodes { - pub(super) fn take(&self, hash: U256) -> Option> { - self.0.borrow_mut().remove(&hash) + pub(super) fn map(&self, hash: U256, f: impl FnOnce(&[u8]) -> R) -> Option { + self.0.borrow().get(&hash).map(|code| f(code)) } fn insert(&self, hash: U256, bytecode: Vec) { diff --git a/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs b/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs index cb7d54dba29f..7b5ea3e4447b 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/evm_emulator.rs @@ -4,7 +4,8 @@ use crate::{ versions::testonly::evm_emulator::{ test_calling_to_mock_emulator_from_native_contract, test_mock_emulator_basics, test_mock_emulator_with_delegate_call, test_mock_emulator_with_deployment, - test_mock_emulator_with_payment, test_mock_emulator_with_recursion, + test_mock_emulator_with_partial_reverts, test_mock_emulator_with_payment, + test_mock_emulator_with_recursion, test_mock_emulator_with_recursive_deployment, test_mock_emulator_with_static_call, test_tracing_evm_contract_deployment, }, vm_fast::Vm, @@ -39,7 +40,22 @@ fn calling_to_mock_emulator_from_native_contract() { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::>(); + test_mock_emulator_with_deployment::>(false); +} + 
+#[test] +fn mock_emulator_with_reverted_deployment() { + test_mock_emulator_with_deployment::>(true); +} + +#[test] +fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::>(); +} + +#[test] +fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::>(); } #[test] diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index f90bac149d0f..d18f7b91f323 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -55,7 +55,6 @@ use crate::{ get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, }, - utils::extract_bytecodes_marked_as_known, MultiVMSubversion, }, }; @@ -653,8 +652,10 @@ impl Vm { // We need to filter out bytecodes the deployment of which may have been reverted; the tracer is not aware of reverts. // To do this, we check bytecodes against deployer events.
- let factory_deps_marked_as_known = extract_bytecodes_marked_as_known(&logs.events); - let new_known_factory_deps = self.world.decommit_bytecodes(&factory_deps_marked_as_known); + let factory_deps_marked_as_known = VmEvent::extract_bytecodes_marked_as_known(&logs.events); + let dynamic_factory_deps = self + .world + .decommit_dynamic_bytecodes(factory_deps_marked_as_known); VmExecutionResultAndLogs { result: result.execution_result, @@ -671,7 +672,7 @@ impl Vm { total_log_queries: 0, }, refunds: result.refunds, - new_known_factory_deps: Some(new_known_factory_deps), + dynamic_factory_deps, } } } @@ -848,16 +849,15 @@ impl World { ) } - fn decommit_bytecodes(&self, hashes: &[H256]) -> HashMap> { - let bytecodes = hashes.iter().map(|&hash| { - let int_hash = h256_to_u256(hash); + fn decommit_dynamic_bytecodes( + &self, + candidate_hashes: impl Iterator, + ) -> HashMap> { + let bytecodes = candidate_hashes.filter_map(|hash| { let bytecode = self - .bytecode_cache - .get(&int_hash) - .cloned() - .or_else(|| self.dynamic_bytecodes.take(int_hash)) - .unwrap_or_else(|| panic!("Bytecode with hash {hash:?} not found")); - (hash, bytecode) + .dynamic_bytecodes + .map(h256_to_u256(hash), <[u8]>::to_vec)?; + Some((hash, bytecode)) }); bytecodes.collect() } @@ -933,17 +933,28 @@ impl zksync_vm2::World for World { self.program_cache .entry(hash) .or_insert_with(|| { - let bytecode = self.bytecode_cache.entry(hash).or_insert_with(|| { - // Since we put the bytecode in the cache anyway, it's safe to *take* it out from `dynamic_bytecodes` - // and put it in `bytecode_cache`. 
- self.dynamic_bytecodes - .take(hash) - .or_else(|| self.storage.load_factory_dep(u256_to_h256(hash))) + let cached = self + .bytecode_cache + .get(&hash) + .map(|code| Program::new(code, false)) + .or_else(|| { + self.dynamic_bytecodes + .map(hash, |code| Program::new(code, false)) + }); + + if let Some(cached) = cached { + cached + } else { + let code = self + .storage + .load_factory_dep(u256_to_h256(hash)) .unwrap_or_else(|| { panic!("VM tried to decommit nonexistent bytecode: {hash:?}"); - }) - }); - Program::new(bytecode, false) + }); + let program = Program::new(&code, false); + self.bytecode_cache.insert(hash, code); + program + } }) .clone() } diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs b/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs index d9331720ce28..f8acfaec4259 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/execution.rs @@ -1,6 +1,7 @@ use std::mem; use zk_evm_1_5_0::aux_structures::Timestamp; +use zksync_vm_interface::VmEvent; use crate::{ interface::{ @@ -14,7 +15,6 @@ use crate::{ circuits_capacity::circuit_statistic_from_cycles, dispatcher::TracerDispatcher, DefaultExecutionTracer, PubdataTracer, RefundsTracer, }, - utils::extract_bytecodes_marked_as_known, vm::Vm, }, HistoryMode, @@ -101,8 +101,8 @@ impl Vm { circuit_statistic_from_cycles(tx_tracer.circuits_tracer.statistics), ); let result = tx_tracer.result_tracer.into_result(); - let factory_deps_marked_as_known = extract_bytecodes_marked_as_known(&logs.events); - let new_known_factory_deps = self.decommit_bytecodes(&factory_deps_marked_as_known); + let factory_deps_marked_as_known = VmEvent::extract_bytecodes_marked_as_known(&logs.events); + let dynamic_factory_deps = self.decommit_dynamic_bytecodes(factory_deps_marked_as_known); *dispatcher = tx_tracer.dispatcher; let result = VmExecutionResultAndLogs { @@ -110,7 +110,7 @@ impl Vm { logs, 
statistics, refunds, - new_known_factory_deps: Some(new_known_factory_deps), + dynamic_factory_deps, }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs index d91fbfdb24df..507e3d8c7598 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs @@ -1,4 +1,7 @@ -use std::{collections::HashMap, fmt::Debug}; +use std::{ + collections::{HashMap, HashSet}, + fmt::Debug, +}; use zk_evm_1_5_0::{ abstractions::{DecommittmentProcessor, Memory, MemoryType}, @@ -27,6 +30,9 @@ pub struct DecommitterOracle { /// The cache of bytecodes that the bootloader "knows", but that are not necessarily in the database. /// And it is also used as a database cache. pub known_bytecodes: HistoryRecorder>, H>, + /// Subset of `known_bytecodes` that are dynamically deployed during VM execution. Currently, + /// only EVM bytecodes can be deployed like that. + pub dynamic_bytecode_hashes: HashSet, /// Stores pages of memory where certain code hashes have already been decommitted. /// It is expected that they all are present in the DB. 
// `decommitted_code_hashes` history is necessary @@ -40,6 +46,7 @@ impl DecommitterOracle { Self { storage, known_bytecodes: HistoryRecorder::default(), + dynamic_bytecode_hashes: HashSet::default(), decommitted_code_hashes: HistoryRecorder::default(), decommitment_requests: HistoryRecorder::default(), } @@ -76,6 +83,17 @@ impl DecommitterOracle { } } + pub fn insert_dynamic_bytecode( + &mut self, + bytecode_hash: U256, + bytecode: Vec, + timestamp: Timestamp, + ) { + self.dynamic_bytecode_hashes.insert(bytecode_hash); + self.known_bytecodes + .insert(bytecode_hash, bytecode, timestamp); + } + pub fn get_decommitted_bytecodes_after_timestamp(&self, timestamp: Timestamp) -> usize { // Note, that here we rely on the fact that for each used bytecode // there is one and only one corresponding event in the history of it. diff --git a/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs b/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs index b9b96c670983..5b6e24eefbf0 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/evm_emulator.rs @@ -4,7 +4,8 @@ use crate::{ versions::testonly::evm_emulator::{ test_calling_to_mock_emulator_from_native_contract, test_mock_emulator_basics, test_mock_emulator_with_delegate_call, test_mock_emulator_with_deployment, - test_mock_emulator_with_payment, test_mock_emulator_with_recursion, + test_mock_emulator_with_partial_reverts, test_mock_emulator_with_payment, + test_mock_emulator_with_recursion, test_mock_emulator_with_recursive_deployment, test_mock_emulator_with_static_call, test_tracing_evm_contract_deployment, }, vm_latest::{HistoryEnabled, Vm}, @@ -39,7 +40,22 @@ fn calling_to_mock_emulator_from_native_contract() { #[test] fn mock_emulator_with_deployment() { - test_mock_emulator_with_deployment::>(); + test_mock_emulator_with_deployment::>(false); +} + +#[test] +fn mock_emulator_with_reverted_deployment() { + 
test_mock_emulator_with_deployment::>(true); +} + +#[test] +fn mock_emulator_with_recursive_deployment() { + test_mock_emulator_with_recursive_deployment::>(); +} + +#[test] +fn mock_emulator_with_partial_reverts() { + test_mock_emulator_with_partial_reverts::>(); } #[test] diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs index becc4f225276..61c8ef0b5abf 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs @@ -89,14 +89,13 @@ impl VmTracer for EvmDeployTracer { state: &mut ZkSyncVmState, _bootloader_state: &mut BootloaderState, ) -> TracerExecutionStatus { + let timestamp = Timestamp(state.local_state.timestamp); for published_bytecode in mem::take(&mut self.pending_bytecodes) { - let hash = hash_evm_bytecode(&published_bytecode); + let hash = h256_to_u256(hash_evm_bytecode(&published_bytecode)); let as_words = bytes_to_be_words(published_bytecode); - - state.decommittment_processor.populate( - vec![(h256_to_u256(hash), as_words)], - Timestamp(state.local_state.timestamp), - ); + state + .decommittment_processor + .insert_dynamic_bytecode(hash, as_words, timestamp); } TracerExecutionStatus::Continue } diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs index d25f66361f1b..90bb0c610e2c 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs @@ -83,27 +83,21 @@ pub(crate) fn new_vm_state( let mut memory = SimpleMemory::default(); let event_sink = InMemoryEventSink::default(); let precompiles_processor = PrecompilesProcessorWithHistory::::default(); + let mut decommittment_processor: DecommitterOracle = DecommitterOracle::new(storage); - - 
decommittment_processor.populate( - vec![( - h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), - )], - Timestamp(0), - ); - + let mut initial_bytecodes = vec![( + h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), + system_env + .base_system_smart_contracts + .default_aa + .code + .clone(), + )]; if let Some(evm_emulator) = &system_env.base_system_smart_contracts.evm_emulator { - decommittment_processor.populate( - vec![(h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())], - Timestamp(0), - ); + initial_bytecodes.push((h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())); } + decommittment_processor.populate(initial_bytecodes, Timestamp(0)); memory.populate( vec![( diff --git a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs index aeb66755f514..97483633bc54 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs @@ -1,9 +1,6 @@ //! Utility functions for the VM. -use once_cell::sync::Lazy; use zk_evm_1_5_0::aux_structures::MemoryPage; -use zksync_types::{H256, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_vm_interface::VmEvent; pub mod fee; pub mod l2_blocks; @@ -14,24 +11,3 @@ pub mod transaction_encoding; pub const fn heap_page_from_base(base: MemoryPage) -> MemoryPage { MemoryPage(base.0 + 2) } - -/// Extracts all bytecodes marked as known on the system contracts. -pub fn extract_bytecodes_marked_as_known(all_generated_events: &[VmEvent]) -> Vec { - static PUBLISHED_BYTECODE_SIGNATURE: Lazy = Lazy::new(|| { - ethabi::long_signature( - "MarkedAsKnown", - &[ethabi::ParamType::FixedBytes(32), ethabi::ParamType::Bool], - ) - }); - - all_generated_events - .iter() - .filter(|event| { - // Filter events from the deployer contract that match the expected signature. 
- event.address == KNOWN_CODES_STORAGE_ADDRESS - && event.indexed_topics.len() == 3 - && event.indexed_topics[0] == *PUBLISHED_BYTECODE_SIGNATURE - }) - .map(|event| event.indexed_topics[1]) - .collect() -} diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index ef6cee454a87..ff90eb14ee42 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -85,16 +85,24 @@ impl Vm { self.state.local_state.callstack.current.ergs_remaining } - pub(crate) fn decommit_bytecodes(&self, hashes: &[H256]) -> HashMap> { - let bytecodes = hashes.iter().map(|&hash| { - let bytecode_words = self - .state - .decommittment_processor + pub(crate) fn decommit_dynamic_bytecodes( + &self, + candidate_hashes: impl Iterator, + ) -> HashMap> { + let decommitter = &self.state.decommittment_processor; + let bytecodes = candidate_hashes.filter_map(|hash| { + let int_hash = h256_to_u256(hash); + if !decommitter.dynamic_bytecode_hashes.contains(&int_hash) { + return None; + } + let bytecode = decommitter .known_bytecodes .inner() - .get(&h256_to_u256(hash)) - .unwrap_or_else(|| panic!("Bytecode with hash {hash:?} not found")); - (hash, be_words_to_bytes(bytecode_words)) + .get(&int_hash) + .unwrap_or_else(|| { + panic!("Bytecode with hash {hash:?} not found"); + }); + Some((hash, be_words_to_bytes(bytecode))) }); bytecodes.collect() } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs index 9462a89be2ab..e8d19dfbba97 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_3_3::aux_structures::Timestamp; @@ -90,7 +90,7 @@ impl Vm { logs, statistics, refunds, - 
new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; (stop_reason, result) diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs index b1ad4d257b77..d3d511ed5398 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/execution.rs @@ -1,4 +1,4 @@ -use std::mem; +use std::{collections::HashMap, mem}; use zk_evm_1_3_3::aux_structures::Timestamp; @@ -88,7 +88,7 @@ impl Vm { .refund_tracer .map(|r| r.get_refunds()) .unwrap_or_default(), - new_known_factory_deps: None, + dynamic_factory_deps: HashMap::new(), // dynamic bytecode deployment is not supported }; tx_tracer.dispatcher.save_results(&mut result); diff --git a/core/lib/vm_executor/src/oneshot/mock.rs b/core/lib/vm_executor/src/oneshot/mock.rs index e211328b5eca..89eaf3c75e29 100644 --- a/core/lib/vm_executor/src/oneshot/mock.rs +++ b/core/lib/vm_executor/src/oneshot/mock.rs @@ -74,13 +74,7 @@ impl MockOneshotExecutor { { Box::new( move |tx: &Transaction, env: &OneshotEnv| -> VmExecutionResultAndLogs { - VmExecutionResultAndLogs { - result: responses(tx, env), - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - } + VmExecutionResultAndLogs::mock(responses(tx, env)) }, ) } diff --git a/core/lib/vm_interface/src/types/outputs/execution_result.rs b/core/lib/vm_interface/src/types/outputs/execution_result.rs index 018ea075db51..9bb784fbf71c 100644 --- a/core/lib/vm_interface/src/types/outputs/execution_result.rs +++ b/core/lib/vm_interface/src/types/outputs/execution_result.rs @@ -21,10 +21,6 @@ const L1_MESSAGE_EVENT_SIGNATURE: H256 = H256([ 58, 54, 228, 114, 145, 244, 32, 31, 175, 19, 127, 171, 8, 29, 146, 41, 91, 206, 45, 83, 190, 44, 108, 166, 139, 168, 44, 127, 
170, 156, 226, 65, ]); -const PUBLISHED_BYTECODE_SIGNATURE: H256 = H256([ - 201, 71, 34, 255, 19, 234, 207, 83, 84, 124, 71, 65, 218, 181, 34, 131, 83, 160, 89, 56, 255, - 205, 213, 212, 162, 213, 51, 174, 14, 97, 130, 135, -]); pub fn bytecode_len_in_bytes(bytecodehash: H256) -> usize { usize::from(u16::from_be_bytes([bytecodehash[2], bytecodehash[3]])) * 32 @@ -50,6 +46,11 @@ impl VmEvent { 72, 13, 60, 159, 114, 123, 94, 92, 18, 3, 212, 198, 31, 177, 133, 211, 127, 8, 230, 178, 220, 94, 155, 191, 152, 89, 27, 26, 122, 221, 245, 124, ]); + /// Long signature of the known bytecodes storage bytecode publication event (`MarkedAsKnown`). + pub const PUBLISHED_BYTECODE_SIGNATURE: H256 = H256([ + 201, 71, 34, 255, 19, 234, 207, 83, 84, 124, 71, 65, 218, 181, 34, 131, 83, 160, 89, 56, + 255, 205, 213, 212, 162, 213, 51, 174, 14, 97, 130, 135, + ]); /// Extracts all the "long" L2->L1 messages that were submitted by the L1Messenger contract. pub fn extract_long_l2_to_l1_messages(events: &[Self]) -> Vec> { @@ -79,12 +80,25 @@ impl VmEvent { // Filter events from the deployer contract that match the expected signature. event.address == KNOWN_CODES_STORAGE_ADDRESS && event.indexed_topics.len() == 3 - && event.indexed_topics[0] == PUBLISHED_BYTECODE_SIGNATURE + && event.indexed_topics[0] == Self::PUBLISHED_BYTECODE_SIGNATURE && event.indexed_topics[2] != H256::zero() }) .map(|event| event.indexed_topics[1]) .collect() } + + /// Extracts all bytecodes marked as known on the system contracts. + pub fn extract_bytecodes_marked_as_known(events: &[Self]) -> impl Iterator + '_ { + events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. + event.address == KNOWN_CODES_STORAGE_ADDRESS + && event.indexed_topics.len() == 3 + && event.indexed_topics[0] == Self::PUBLISHED_BYTECODE_SIGNATURE + }) + .map(|event| event.indexed_topics[1]) + } } /// Refunds produced for the user. 
@@ -120,10 +134,10 @@ pub struct VmExecutionResultAndLogs { pub logs: VmExecutionLogs, pub statistics: VmExecutionStatistics, pub refunds: Refunds, - /// Bytecodes decommitted during VM execution. `None` if not computed by the VM. - // FIXME: currently, this is only filled up by `vm_latest`; probably makes sense to narrow down - // to *dynamic* factory deps, so that `HashMap::new()` is a valid value for VMs not supporting EVM emulation. - pub new_known_factory_deps: Option>>, + /// Dynamic bytecodes decommitted during VM execution (i.e., not present in the storage at the start of VM execution + /// or in `factory_deps` fields of executed transactions). Currently, the only kind of such codes are EVM bytecodes. + /// Correspondingly, they may only be present if supported by the VM version, and if the VM is initialized with the EVM emulator base system contract. + pub dynamic_factory_deps: HashMap>, } #[derive(Debug, Clone, PartialEq)] @@ -144,6 +158,22 @@ impl ExecutionResult { } impl VmExecutionResultAndLogs { + /// Creates a mock full result based on the provided base result. + pub fn mock(result: ExecutionResult) -> Self { + Self { + result, + logs: VmExecutionLogs::default(), + statistics: VmExecutionStatistics::default(), + refunds: Refunds::default(), + dynamic_factory_deps: HashMap::new(), + } + } + + /// Creates a mock successful result with no payload. 
+ pub fn mock_success() -> Self { + Self::mock(ExecutionResult::Success { output: vec![] }) + } + pub fn get_execution_metrics(&self, tx: Option<&Transaction>) -> VmExecutionMetrics { let contracts_deployed = tx .map(|tx| tx.execute.factory_deps.len() as u16) @@ -414,6 +444,6 @@ mod tests { "MarkedAsKnown", &[ethabi::ParamType::FixedBytes(32), ethabi::ParamType::Bool], ); - assert_eq!(PUBLISHED_BYTECODE_SIGNATURE, expected_signature); + assert_eq!(VmEvent::PUBLISHED_BYTECODE_SIGNATURE, expected_signature); } } diff --git a/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs b/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs index 8f7c1d4fb0d6..7e90d425ab15 100644 --- a/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs +++ b/core/lib/vm_interface/src/types/outputs/finished_l1batch.rs @@ -1,7 +1,6 @@ use zksync_types::writes::StateDiffRecord; use super::{BootloaderMemory, CurrentExecutionState, VmExecutionResultAndLogs}; -use crate::{ExecutionResult, Refunds, VmExecutionLogs, VmExecutionStatistics}; /// State of the VM after the batch execution. 
#[derive(Debug, Clone)] @@ -21,13 +20,7 @@ pub struct FinishedL1Batch { impl FinishedL1Batch { pub fn mock() -> Self { FinishedL1Batch { - block_tip_execution_result: VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, - logs: VmExecutionLogs::default(), - statistics: VmExecutionStatistics::default(), - refunds: Refunds::default(), - new_known_factory_deps: None, - }, + block_tip_execution_result: VmExecutionResultAndLogs::mock_success(), final_execution_state: CurrentExecutionState { events: vec![], deduplicated_storage_logs: vec![], diff --git a/core/lib/vm_interface/src/utils/shadow.rs b/core/lib/vm_interface/src/utils/shadow.rs index 060c04298547..0883971f4de8 100644 --- a/core/lib/vm_interface/src/utils/shadow.rs +++ b/core/lib/vm_interface/src/utils/shadow.rs @@ -190,15 +190,13 @@ impl CheckDivergence for VmExecutionResultAndLogs { &other.statistics.computational_gas_used, ); - if let (Some(these_deps), Some(other_deps)) = - (&self.new_known_factory_deps, &other.new_known_factory_deps) - { - // Order deps to have a more reasonable diff on a mismatch - let these_deps = these_deps.iter().collect::>(); - let other_deps = other_deps.iter().collect::>(); - errors.check_match("new_known_factory_deps", &these_deps, &other_deps); - } - + // Order deps to have a more reasonable diff on a mismatch + let these_deps = self.dynamic_factory_deps.iter().collect::>(); + let other_deps = other + .dynamic_factory_deps + .iter() + .collect::>(); + errors.check_match("dynamic_factory_deps", &these_deps, &other_deps); errors } } diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 7dd0164198a1..4e0426de7bfa 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -638,11 +638,8 @@ impl HttpTest for SendTransactionWithDetailedOutputTest { assert_eq!(env.l1_batch.first_l2_block.number, 1); VmExecutionResultAndLogs { - result: ExecutionResult::Success { 
output: vec![] }, logs: vm_execution_logs.clone(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, + ..VmExecutionResultAndLogs::mock_success() } }); tx_executor diff --git a/core/node/state_keeper/src/io/persistence.rs b/core/node/state_keeper/src/io/persistence.rs index 06f1972a02aa..8bfd812c8a1f 100644 --- a/core/node/state_keeper/src/io/persistence.rs +++ b/core/node/state_keeper/src/io/persistence.rs @@ -379,7 +379,7 @@ impl StateKeeperOutputHandler for TreeWritesPersistence { #[cfg(test)] mod tests { - use std::collections::{HashMap, HashSet}; + use std::collections::HashSet; use assert_matches::assert_matches; use futures::FutureExt; @@ -510,7 +510,6 @@ mod tests { tx, tx_result, vec![], - HashMap::new(), BlockGasCount::default(), VmExecutionMetrics::default(), vec![], diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index adef238fe928..7196236475df 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -1,7 +1,4 @@ -use std::{ - collections::HashMap, - time::{Duration, SystemTime, UNIX_EPOCH}, -}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; use test_casing::test_casing; use zksync_contracts::BaseSystemContractsHashes; @@ -14,15 +11,19 @@ use zksync_multivm::{ utils::derive_base_fee_and_gas_per_pubdata, }; use zksync_node_test_utils::prepare_recovery_snapshot; +use zksync_system_constants::KNOWN_CODES_STORAGE_ADDRESS; use zksync_types::{ block::{BlockGasCount, L2BlockHasher}, - commitment::L1BatchCommitmentMode, + commitment::{L1BatchCommitmentMode, PubdataParams}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, l2::L2Tx, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; -use zksync_utils::time::seconds_since_epoch; +use zksync_utils::{ + bytecode::{hash_bytecode, 
hash_evm_bytecode}, + time::seconds_since_epoch, +}; use self::tester::Tester; use crate::{ @@ -229,6 +230,29 @@ async fn l1_batch_timestamp_respects_prev_l2_block_with_clock_skew( test_timestamps_are_distinct(connection_pool, current_timestamp + 2, true, tester).await; } +fn create_block_seal_command( + l1_batch_number: L1BatchNumber, + l2_block: L2BlockUpdates, +) -> L2BlockSealCommand { + L2BlockSealCommand { + l1_batch_number, + l2_block, + first_tx_index: 0, + fee_account_address: Address::repeat_byte(0x23), + fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { + l1_gas_price: 100, + fair_l2_gas_price: 100, + fair_pubdata_price: 100, + }), + base_fee_per_gas: 10, + base_system_contracts_hashes: BaseSystemContractsHashes::default(), + protocol_version: Some(ProtocolVersionId::latest()), + l2_legacy_shared_bridge_addr: Some(Address::default()), + pre_insert_txs: false, + pubdata_params: PubdataParams::default(), + } +} + #[tokio::test] async fn processing_storage_logs_when_sealing_l2_block() { let connection_pool = @@ -261,7 +285,6 @@ async fn processing_storage_logs_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); @@ -280,28 +303,11 @@ async fn processing_storage_logs_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); let l1_batch_number = L1BatchNumber(2); - let seal_command = L2BlockSealCommand { - l1_batch_number, - l2_block, - first_tx_index: 0, - fee_account_address: Address::repeat_byte(0x23), - fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { - l1_gas_price: 100, - fair_l2_gas_price: 100, - fair_pubdata_price: 100, - }), - base_fee_per_gas: 10, - base_system_contracts_hashes: BaseSystemContractsHashes::default(), - protocol_version: Some(ProtocolVersionId::latest()), - l2_legacy_shared_bridge_addr: Some(Address::default()), - pre_insert_txs: false, - 
pubdata_params: Default::default(), - }; + let seal_command = create_block_seal_command(l1_batch_number, l2_block); connection_pool .connection() .await @@ -371,28 +377,11 @@ async fn processing_events_when_sealing_l2_block() { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); } - let seal_command = L2BlockSealCommand { - l1_batch_number, - l2_block, - first_tx_index: 0, - fee_account_address: Address::repeat_byte(0x23), - fee_input: BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { - l1_gas_price: 100, - fair_l2_gas_price: 100, - fair_pubdata_price: 100, - }), - base_fee_per_gas: 10, - base_system_contracts_hashes: BaseSystemContractsHashes::default(), - protocol_version: Some(ProtocolVersionId::latest()), - l2_legacy_shared_bridge_addr: Some(Address::default()), - pre_insert_txs: false, - pubdata_params: Default::default(), - }; + let seal_command = create_block_seal_command(l1_batch_number, l2_block); pool.connection() .await .unwrap() @@ -416,6 +405,114 @@ async fn processing_events_when_sealing_l2_block() { } } +fn bytecode_publishing_events( + l1_batch_number: L1BatchNumber, + tx_index: u32, + bytecode_hashes: impl Iterator, +) -> Vec { + bytecode_hashes + .map(|bytecode_hash| VmEvent { + location: (l1_batch_number, tx_index), + address: KNOWN_CODES_STORAGE_ADDRESS, + indexed_topics: vec![ + VmEvent::PUBLISHED_BYTECODE_SIGNATURE, + bytecode_hash, + H256::from_low_u64_be(1), // sentBytecodeToL1 + ], + value: vec![], + }) + .collect() +} + +#[tokio::test] +async fn processing_dynamic_factory_deps_when_sealing_l2_block() { + let pool = + ConnectionPool::::constrained_test_pool(L2BlockSealProcess::subtasks_len()).await; + let l1_batch_number = L1BatchNumber(2); + let l2_block_number = L2BlockNumber(3); + let mut l2_block = L2BlockUpdates::new( + 0, + l2_block_number, + H256::zero(), + 1, + ProtocolVersionId::latest(), + ); + + let static_factory_deps: Vec<_> = (0_u8..10) + .map(|byte| { + let 
era_bytecode = vec![byte; 32]; + (hash_bytecode(&era_bytecode), era_bytecode) + }) + .collect(); + let dynamic_factory_deps: Vec<_> = (0_u8..10) + .map(|byte| { + let evm_bytecode = vec![byte; 96]; + (hash_evm_bytecode(&evm_bytecode), evm_bytecode) + }) + .collect(); + let mut all_factory_deps = static_factory_deps.clone(); + all_factory_deps.extend_from_slice(&dynamic_factory_deps); + + let events = bytecode_publishing_events( + l1_batch_number, + 0, + static_factory_deps + .iter() + .chain(&dynamic_factory_deps) + .map(|(hash, _)| *hash), + ); + + let mut tx = create_transaction(10, 100); + tx.execute.factory_deps = static_factory_deps + .into_iter() + .map(|(_, bytecode)| bytecode) + .collect(); + let mut execution_result = create_execution_result([]); + execution_result.dynamic_factory_deps = dynamic_factory_deps.into_iter().collect(); + execution_result.logs.events = events; + l2_block.extend_from_executed_transaction( + tx, + execution_result, + BlockGasCount::default(), + VmExecutionMetrics::default(), + vec![], + vec![], + ); + + assert_eq!( + l2_block.new_factory_deps.len(), + all_factory_deps.len(), + "{:?}", + l2_block.new_factory_deps + ); + for (hash, bytecode) in &all_factory_deps { + assert_eq!( + l2_block.new_factory_deps.get(hash), + Some(bytecode), + "{hash:?}" + ); + } + + let seal_command = create_block_seal_command(l1_batch_number, l2_block); + pool.connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + seal_command.seal(pool.clone()).await.unwrap(); + + let mut conn = pool.connection().await.unwrap(); + let persisted_factory_deps = conn + .factory_deps_dal() + .dump_all_factory_deps_for_tests() + .await; + for (hash, bytecode) in &all_factory_deps { + assert_eq!(persisted_factory_deps.get(hash), Some(bytecode), "{hash:?}"); + } +} + #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn l2_block_processing_after_snapshot_recovery(commitment_mode: 
L1BatchCommitmentMode) { @@ -445,15 +542,7 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom tx_filter.gas_per_pubdata, TransactionTimeRangeConstraint::default(), ); - storage - .transactions_dal() - .insert_transaction_l2( - &tx, - TransactionExecutionMetrics::default(), - ValidationTraces::default(), - ) - .await - .unwrap(); + insert_l2_transaction(&mut storage, &tx).await; let previous_batch_hash = mempool .load_batch_state_hash(snapshot_recovery.l1_batch_number) @@ -479,7 +568,6 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom tx.into(), create_execution_result([]), vec![], - HashMap::new(), BlockGasCount::default(), VmExecutionMetrics::default(), vec![], @@ -603,15 +691,7 @@ async fn continue_unsealed_batch_on_restart(commitment_mode: L1BatchCommitmentMo tx_filter.gas_per_pubdata, TransactionTimeRangeConstraint::default(), ); - storage - .transactions_dal() - .insert_transaction_l2( - &tx, - TransactionExecutionMetrics::default(), - ValidationTraces::default(), - ) - .await - .unwrap(); + insert_l2_transaction(&mut storage, &tx).await; let old_l1_batch_params = mempool .wait_for_new_batch_params(&cursor, Duration::from_secs(10)) diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index 60e206038990..fe37ee8d8dd6 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -503,9 +503,8 @@ impl ZkSyncStateKeeper { updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, call_tracer_result, @@ -629,9 +628,8 @@ impl ZkSyncStateKeeper { } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, 
call_tracer_result, @@ -711,9 +709,8 @@ impl ZkSyncStateKeeper { } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, - *tx_result.clone(), + *tx_result, compressed_bytecodes, - tx_result.new_known_factory_deps.unwrap_or_default(), tx_l1_gas_this_tx, tx_execution_metrics, call_tracer_result, diff --git a/core/node/state_keeper/src/seal_criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/mod.rs index b82d61666fbf..c10b01e7e73d 100644 --- a/core/node/state_keeper/src/seal_criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/mod.rs @@ -278,8 +278,6 @@ impl L2BlockMaxPayloadSizeSealer { #[cfg(test)] mod tests { - use std::collections::HashMap; - use zksync_utils::time::seconds_since_epoch; use super::*; @@ -290,7 +288,6 @@ mod tests { tx, create_execution_result([]), vec![], - HashMap::new(), BlockGasCount::default(), VmExecutionMetrics::default(), vec![], diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index ad50c8ca8ce6..b0f641ccbc1a 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -8,8 +8,8 @@ use zksync_dal::{ConnectionPool, Core, CoreDal as _}; use zksync_multivm::interface::{ executor::{BatchExecutor, BatchExecutorFactory}, storage::{InMemoryStorage, StorageView}, - BatchTransactionExecutionResult, ExecutionResult, FinishedL1Batch, L1BatchEnv, L2BlockEnv, - SystemEnv, VmExecutionResultAndLogs, + BatchTransactionExecutionResult, FinishedL1Batch, L1BatchEnv, L2BlockEnv, SystemEnv, + VmExecutionResultAndLogs, }; use zksync_state::OwnedStorage; use zksync_test_account::Account; @@ -28,13 +28,7 @@ pub(super) static BASE_SYSTEM_CONTRACTS: Lazy = /// Creates a `TxExecutionResult` object denoting a successful tx execution. 
pub(crate) fn successful_exec() -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { - tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - }), + tx_result: Box::new(VmExecutionResultAndLogs::mock_success()), compressed_bytecodes: vec![], call_traces: vec![], } diff --git a/core/node/state_keeper/src/testonly/test_batch_executor.rs b/core/node/state_keeper/src/testonly/test_batch_executor.rs index 5fe05167504c..5625add021bf 100644 --- a/core/node/state_keeper/src/testonly/test_batch_executor.rs +++ b/core/node/state_keeper/src/testonly/test_batch_executor.rs @@ -258,14 +258,11 @@ pub(crate) fn random_upgrade_tx(tx_number: u64) -> ProtocolUpgradeTx { pub(crate) fn successful_exec_with_log() -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, logs: VmExecutionLogs { user_l2_to_l1_logs: vec![UserL2ToL1Log::default()], ..VmExecutionLogs::default() }, - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, + ..VmExecutionResultAndLogs::mock_success() }), compressed_bytecodes: vec![], call_traces: vec![], @@ -275,13 +272,9 @@ pub(crate) fn successful_exec_with_log() -> BatchTransactionExecutionResult { /// Creates a `TxExecutionResult` object denoting a tx that was rejected. 
pub(crate) fn rejected_exec(reason: Halt) -> BatchTransactionExecutionResult { BatchTransactionExecutionResult { - tx_result: Box::new(VmExecutionResultAndLogs { - result: ExecutionResult::Halt { reason }, - logs: Default::default(), - statistics: Default::default(), - refunds: Default::default(), - new_known_factory_deps: None, - }), + tx_result: Box::new(VmExecutionResultAndLogs::mock(ExecutionResult::Halt { + reason, + })), compressed_bytecodes: vec![], call_traces: vec![], } diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index 16eed0b2f7f7..28e2f9886b49 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -10,8 +10,8 @@ use tokio::sync::watch; use zksync_config::configs::chain::StateKeeperConfig; use zksync_multivm::{ interface::{ - ExecutionResult, Halt, L1BatchEnv, L2BlockEnv, Refunds, SystemEnv, TxExecutionMode, - VmExecutionLogs, VmExecutionResultAndLogs, VmExecutionStatistics, + Halt, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, VmExecutionLogs, + VmExecutionResultAndLogs, VmExecutionStatistics, }, vm_latest::constants::BATCH_COMPUTATIONAL_GAS_LIMIT, }; @@ -120,26 +120,16 @@ pub(super) fn create_execution_result( let total_log_queries = storage_logs.len() + 2; VmExecutionResultAndLogs { - result: ExecutionResult::Success { output: vec![] }, logs: VmExecutionLogs { - events: vec![], - system_l2_to_l1_logs: vec![], - user_l2_to_l1_logs: vec![], storage_logs, total_log_queries_count: total_log_queries, + ..VmExecutionLogs::default() }, statistics: VmExecutionStatistics { - contracts_used: 0, - cycles_used: 0, - gas_used: 0, - gas_remaining: 0, - computational_gas_used: 0, total_log_queries, - pubdata_published: 0, - circuit_statistic: Default::default(), + ..VmExecutionStatistics::default() }, - refunds: Refunds::default(), - new_known_factory_deps: None, + ..VmExecutionResultAndLogs::mock_success() } } diff --git 
a/core/node/state_keeper/src/updates/l1_batch_updates.rs b/core/node/state_keeper/src/updates/l1_batch_updates.rs index 2979ebbd8c26..aa2e22cac483 100644 --- a/core/node/state_keeper/src/updates/l1_batch_updates.rs +++ b/core/node/state_keeper/src/updates/l1_batch_updates.rs @@ -49,8 +49,6 @@ impl L1BatchUpdates { #[cfg(test)] mod tests { - use std::collections::HashMap; - use zksync_multivm::vm_latest::TransactionVmExt; use zksync_types::{L2BlockNumber, ProtocolVersionId, H256}; @@ -78,7 +76,6 @@ mod tests { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); diff --git a/core/node/state_keeper/src/updates/l2_block_updates.rs b/core/node/state_keeper/src/updates/l2_block_updates.rs index 27995b384abe..6faa098d40a2 100644 --- a/core/node/state_keeper/src/updates/l2_block_updates.rs +++ b/core/node/state_keeper/src/updates/l2_block_updates.rs @@ -5,7 +5,7 @@ use zksync_multivm::{ Call, CompressedBytecodeInfo, ExecutionResult, L2BlockEnv, TransactionExecutionResult, TxExecutionStatus, VmEvent, VmExecutionMetrics, VmExecutionResultAndLogs, }, - vm_latest::{utils::extract_bytecodes_marked_as_known, TransactionVmExt}, + vm_latest::TransactionVmExt, }; use zksync_types::{ block::{BlockGasCount, L2BlockHasher}, @@ -88,16 +88,10 @@ impl L2BlockUpdates { tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, compressed_bytecodes: Vec, - new_known_factory_deps: HashMap>, call_traces: Vec, ) { let saved_factory_deps = - extract_bytecodes_marked_as_known(&tx_execution_result.logs.events); - self.events.extend(tx_execution_result.logs.events); - self.user_l2_to_l1_logs - .extend(tx_execution_result.logs.user_l2_to_l1_logs); - self.system_l2_to_l1_logs - .extend(tx_execution_result.logs.system_l2_to_l1_logs); + VmEvent::extract_bytecodes_marked_as_known(&tx_execution_result.logs.events); let gas_refunded = tx_execution_result.refunds.gas_refunded; let operator_suggested_refund = 
tx_execution_result.refunds.operator_suggested_refund; @@ -129,10 +123,10 @@ impl L2BlockUpdates { .collect(); // Ensure that *dynamic* factory deps (ones that may be created when executing EVM contracts) // are added into the lookup map as well. - tx_factory_deps.extend(new_known_factory_deps); + tx_factory_deps.extend(tx_execution_result.dynamic_factory_deps); // Save all bytecodes that were marked as known in the bootloader - let known_bytecodes = saved_factory_deps.into_iter().map(|bytecode_hash| { + let known_bytecodes = saved_factory_deps.map(|bytecode_hash| { let bytecode = tx_factory_deps.get(&bytecode_hash).unwrap_or_else(|| { panic!( "Failed to get factory deps on tx: bytecode hash: {:?}, tx hash: {}", @@ -140,7 +134,7 @@ impl L2BlockUpdates { tx.hash() ) }); - (bytecode_hash, bytecode.to_vec()) + (bytecode_hash, bytecode.clone()) }); self.new_factory_deps.extend(known_bytecodes); @@ -149,6 +143,11 @@ impl L2BlockUpdates { self.txs_encoding_size += tx.bootloader_encoding_size(); self.payload_encoding_size += zksync_protobuf::repr::encode::(&tx).len(); + self.events.extend(tx_execution_result.logs.events); + self.user_l2_to_l1_logs + .extend(tx_execution_result.logs.user_l2_to_l1_logs); + self.system_l2_to_l1_logs + .extend(tx_execution_result.logs.system_l2_to_l1_logs); self.storage_logs .extend(tx_execution_result.logs.storage_logs); @@ -211,7 +210,6 @@ mod tests { BlockGasCount::default(), VmExecutionMetrics::default(), vec![], - HashMap::new(), vec![], ); diff --git a/core/node/state_keeper/src/updates/mod.rs b/core/node/state_keeper/src/updates/mod.rs index b1bd35c921ca..752963580e37 100644 --- a/core/node/state_keeper/src/updates/mod.rs +++ b/core/node/state_keeper/src/updates/mod.rs @@ -1,5 +1,3 @@ -use std::collections::HashMap; - use zksync_contracts::BaseSystemContractsHashes; use zksync_multivm::{ interface::{ @@ -10,7 +8,7 @@ use zksync_multivm::{ }; use zksync_types::{ block::BlockGasCount, commitment::PubdataParams, fee_model::BatchFeeInput, 
Address, - L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, H256, + L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, }; pub(crate) use self::{l1_batch_updates::L1BatchUpdates, l2_block_updates::L2BlockUpdates}; @@ -119,7 +117,6 @@ impl UpdatesManager { tx: Transaction, tx_execution_result: VmExecutionResultAndLogs, compressed_bytecodes: Vec, - new_known_factory_deps: HashMap>, tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, call_traces: Vec, @@ -135,7 +132,6 @@ impl UpdatesManager { tx_l1_gas_this_tx, execution_metrics, compressed_bytecodes, - new_known_factory_deps, call_traces, ); latency.observe(); @@ -246,7 +242,6 @@ mod tests { tx, create_execution_result([]), vec![], - HashMap::new(), new_block_gas_count(), VmExecutionMetrics::default(), vec![], diff --git a/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol b/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol index baa0d37b7530..3a7ee40db228 100644 --- a/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol +++ b/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol @@ -90,6 +90,25 @@ contract MockContractDeployer { ACCOUNT_CODE_STORAGE_CONTRACT.storeAccountConstructedCodeHash(newAddress, _salt); return newAddress; } + + bytes32 constant CREATE2_PREFIX = keccak256("zksyncCreate2"); + + /// Mocks `create2` with real counterpart semantics, other than bytecode passed in `_input`. 
+ /// @param _input bytecode to publish + function create2( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input + ) external payable returns (address newAddress) { + KNOWN_CODE_STORAGE_CONTRACT.setEVMBytecodeHash(_bytecodeHash); + KNOWN_CODE_STORAGE_CONTRACT.publishEVMBytecode(_input); + + bytes32 hash = keccak256( + bytes.concat(CREATE2_PREFIX, bytes32(uint256(uint160(msg.sender))), _salt, _bytecodeHash) + ); + newAddress = address(uint160(uint256(hash))); + ACCOUNT_CODE_STORAGE_CONTRACT.storeAccountConstructedCodeHash(newAddress, _bytecodeHash); + } } interface IAccountCodeStorage { @@ -101,6 +120,16 @@ interface IRecursiveContract { function recurse(uint _depth) external returns (uint); } +interface IRecursiveDeployment { + struct EvmDeployment { + bytes32 bytecodeHash; + /// Has fixed length to enable array slicing. + bytes32 bytecode; + } + + function testRecursiveDeployment(EvmDeployment[] calldata _deployments) external; +} + /// Native incrementing library. Not actually a library to simplify deployment. contract IncrementingContract { // Should not collide with other storage slots @@ -154,7 +183,7 @@ uint constant EVM_EMULATOR_STIPEND = 1 << 30; /** * Mock EVM emulator used in low-level tests. */ -contract MockEvmEmulator is IRecursiveContract, IncrementingContract { +contract MockEvmEmulator is IRecursiveContract, IRecursiveDeployment, IncrementingContract { IAccountCodeStorage constant ACCOUNT_CODE_STORAGE_CONTRACT = IAccountCodeStorage(address(0x8002)); /// Set to `true` for testing logic sanity. @@ -210,7 +239,11 @@ contract MockEvmEmulator is IRecursiveContract, IncrementingContract { MockContractDeployer constant CONTRACT_DEPLOYER_CONTRACT = MockContractDeployer(address(0x8006)); /// Emulates EVM contract deployment and a subsequent call to it in a single transaction. 
- function testDeploymentAndCall(bytes32 _evmBytecodeHash, bytes calldata _evmBytecode) external validEvmEntry { + function testDeploymentAndCall( + bytes32 _evmBytecodeHash, + bytes calldata _evmBytecode, + bool _revert + ) external validEvmEntry { IRecursiveContract newContract = IRecursiveContract(CONTRACT_DEPLOYER_CONTRACT.create( _evmBytecodeHash, _evmBytecodeHash, @@ -222,6 +255,69 @@ contract MockEvmEmulator is IRecursiveContract, IncrementingContract { uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; require(newContract.recurse{gas: gasToSend}(5) == 120, "unexpected recursive result"); + require(!_revert, "requested revert"); + } + + function testCallToPreviousDeployment() external validEvmEntry { + IRecursiveContract newContract = IRecursiveContract(address(uint160(address(this)) + 1)); + require(address(newContract).code.length > 0, "contract code length"); + require(address(newContract).codehash != bytes32(0), "contract code hash"); + + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + require(newContract.recurse{gas: gasToSend}(5) == 120, "unexpected recursive result"); + } + + function testRecursiveDeployment(EvmDeployment[] calldata _deployments) external override validEvmEntry { + if (_deployments.length == 0) { + return; + } + + IRecursiveDeployment newContract = IRecursiveDeployment(CONTRACT_DEPLOYER_CONTRACT.create( + _deployments[0].bytecodeHash, + _deployments[0].bytecodeHash, + bytes.concat(_deployments[0].bytecode) + )); + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + newContract.testRecursiveDeployment{gas: gasToSend}(_deployments[1:]); + } + + function testDeploymentWithPartialRevert( + EvmDeployment[] calldata _deployments, + bool[] calldata _shouldRevert + ) external validEvmEntry { + require(_deployments.length == _shouldRevert.length, "length mismatch"); + + for (uint i = 0; i < _deployments.length; i++) { + uint gasToSend = gasleft() - EVM_EMULATOR_STIPEND; + try this.deployThenRevert{gas: gasToSend}( + _deployments[i], + 
bytes32(i), + _shouldRevert[i] + ) returns(address newAddress) { + require(!_shouldRevert[i], "unexpected deploy success"); + require(newAddress.code.length > 0, "contract code length"); + require(newAddress.codehash != bytes32(0), "contract code hash"); + } catch Error(string memory reason) { + require(_shouldRevert[i], "unexpected revert"); + require(keccak256(bytes(reason)) == keccak256("requested revert"), "unexpected error"); + } + } + } + + function deployThenRevert( + EvmDeployment calldata _deployment, + bytes32 _salt, + bool _shouldRevert + ) external validEvmEntry returns (address newAddress) { + newAddress = CONTRACT_DEPLOYER_CONTRACT.create2( + _salt, + _deployment.bytecodeHash, + bytes.concat(_deployment.bytecode) + ); + require(newAddress.code.length > 0, "contract code length"); + require(newAddress.codehash != bytes32(0), "contract code hash"); + + require(!_shouldRevert, "requested revert"); } fallback() external validEvmEntry { diff --git a/yarn.lock b/yarn.lock index 58511dd1b9ff..15fb8bb7d967 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1424,6 +1424,18 @@ resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -2303,6 +2315,11 @@ 
resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.6.tgz#2a880a24eb19b4f8b25adc2a5095f2aa27f39677" integrity sha512-xSmezSupL+y9VkHZJGDoCBpmnB2ogM13ccaYDWqJTfS3dbuHkgjuwDFUmaFauBCboQMGB/S5UqUl2y54X99BmA== +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + "@pkgr/core@^0.1.0": version "0.1.1" resolved "https://registry.yarnpkg.com/@pkgr/core/-/core-0.1.1.tgz#1ec17e2edbec25c8306d424ecfbf13c7de1aaa31" @@ -2633,6 +2650,16 @@ mkdirp "^2.1.6" path-browserify "^1.0.1" +"@ts-morph/common@~0.23.0": + version "0.23.0" + resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.23.0.tgz#bd4ddbd3f484f29476c8bd985491592ae5fc147e" + integrity sha512-m7Lllj9n/S6sOkCkRftpM7L24uvmfXQFedlW/4hENcuJH1HHm9u5EgxZb9uVjQSCGrbBWBkOGgcTxNg36r6ywA== + dependencies: + fast-glob "^3.3.2" + minimatch "^9.0.3" + mkdirp "^3.0.1" + path-browserify "^1.0.1" + "@tsconfig/node10@^1.0.7": version "1.0.11" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" @@ -3305,6 +3332,11 @@ ansi-regex@^5.0.1: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== +ansi-regex@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== + ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -3324,6 +3356,11 @@ ansi-styles@^5.0.0: resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + antlr4@^4.11.0: version "4.13.1" resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.13.1.tgz#1e0a1830a08faeb86217cb2e6c34716004e4253d" @@ -4183,6 +4220,11 @@ code-block-writer@^12.0.0: resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-12.0.0.tgz#4dd58946eb4234105aff7f0035977b2afdc2a770" integrity sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w== +code-block-writer@^13.0.1: + version "13.0.3" + resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-13.0.3.tgz#90f8a84763a5012da7af61319dd638655ae90b5b" + integrity sha512-Oofo0pq3IKnsFtuHqSF7TqBfr71aeyZDVJ0HpmqB7FBM2qEigL0iPONSCZSO9pE9dZTAxANe5XHG9Uy0YMv8cg== + collect-v8-coverage@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" @@ -4435,7 +4477,7 @@ cross-spawn@^6.0.5: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -4738,6 +4780,11 @@ dotenv@^8.2.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" integrity 
sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" @@ -4804,6 +4851,11 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + encoding-down@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" @@ -5774,6 +5826,14 @@ for-each@^0.3.3: dependencies: is-callable "^1.1.3" +foreground-child@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.0.tgz#0ac8644c06e431439f8561db8ecf29a7b5519c77" + integrity sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -6062,6 +6122,18 @@ glob@8.1.0, glob@^8.0.3: minimatch "^5.0.1" once "^1.3.0" +glob@^10.4.1: + version "10.4.5" + resolved 
"https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" + glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" @@ -6974,6 +7046,15 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" +jackspeak@^3.1.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jest-changed-files@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" @@ -7877,6 +7958,11 @@ lowercase-keys@^3.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -8175,6 +8261,13 @@ minimatch@^7.4.3: dependencies: brace-expansion "^2.0.1" +minimatch@^9.0.3, minimatch@^9.0.4: + version "9.0.5" + resolved 
"https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + minimatch@~3.0.4: version "3.0.8" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" @@ -8187,6 +8280,11 @@ minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8, minimist@~1. resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + mkdirp-classic@^0.5.2: version "0.5.3" resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" @@ -8209,6 +8307,11 @@ mkdirp@^2.1.6: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.6.tgz#964fbcb12b2d8c5d6fbc62a963ac95a273e2cc19" integrity sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + mnemonist@^0.38.0: version "0.38.5" resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.5.tgz#4adc7f4200491237fe0fa689ac0b86539685cade" @@ -8664,6 +8767,11 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity 
sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== +package-json-from-dist@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" + integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== + package-json@^8.1.0: version "8.1.1" resolved "https://registry.yarnpkg.com/package-json/-/package-json-8.1.1.tgz#3e9948e43df40d1e8e78a85485f1070bf8f03dc8" @@ -8739,6 +8847,14 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-to-regexp@^6.2.1: version "6.2.2" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.2.tgz#324377a83e5049cbecadc5554d6a63a9a4866b36" @@ -9739,6 +9855,11 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + sinon-chai@^3.7.0: version "3.7.0" resolved 
"https://registry.yarnpkg.com/sinon-chai/-/sinon-chai-3.7.0.tgz#cfb7dec1c50990ed18c153f1840721cf13139783" @@ -10070,6 +10191,15 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^2.1.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" @@ -10087,6 +10217,15 @@ string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2 is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + string.prototype.padend@^3.0.0: version "3.1.6" resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.6.tgz#ba79cf8992609a91c872daa47c6bb144ee7f62a5" @@ -10144,6 +10283,13 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" @@ -10165,6 +10311,13 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" @@ -10520,6 +10673,14 @@ ts-morph@^19.0.0: "@ts-morph/common" "~0.20.0" code-block-writer "^12.0.0" +ts-morph@^22.0.0: + version "22.0.0" + resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-22.0.0.tgz#5532c592fb6dddae08846f12c9ab0fc590b1d42e" + integrity sha512-M9MqFGZREyeb5fTl6gNHKZLqBQA0TjA1lea+CR48R8EBTDuWrNqW6ccC5QvjNR4s6wDumD3LTCjOFSp9iwlzaw== + dependencies: + "@ts-morph/common" "~0.23.0" + code-block-writer "^13.0.1" + ts-node@^10.1.0, ts-node@^10.7.0: version "10.9.2" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" @@ -11000,6 +11161,15 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^7.0.0: version "7.0.0" resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -11009,6 +11179,15 @@ wrap-ansi@^7.0.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" From 4509179f62ead4b837dfb67760f52de76fac2e37 Mon Sep 17 00:00:00 2001 From: Alex Ostrovski Date: Fri, 8 Nov 2024 15:55:59 +0200 Subject: [PATCH 6/7] feat(contract-verifier): Adapt contract verifier API for EVM bytecodes (#3234) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - Adapts contract verifier APIs to work with EVM bytecodes; adds corresponding request correctness checks. - Brushes up the verifier API server in general. ## Why ❔ Part of the efforts to support EVM bytecode verification. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [x] Tests for the changes have been added / updated. - [x] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. 
--- Cargo.lock | 12 +- Cargo.toml | 1 + core/bin/contract-verifier/Cargo.toml | 5 +- core/bin/contract-verifier/src/main.rs | 53 +-- core/lib/contract_verifier/src/lib.rs | 112 ++++-- core/lib/contract_verifier/src/resolver.rs | 19 +- core/lib/contract_verifier/src/tests/mod.rs | 45 ++- core/lib/contract_verifier/src/tests/real.rs | 40 +- ...6b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json | 2 +- ...5094de508af93f4085be7cf3b54b1e8ecdadd.json | 2 +- ...make_zk_compiler_version_nullable.down.sql | 2 + ...2_make_zk_compiler_version_nullable.up.sql | 2 + core/lib/dal/src/contract_verification_dal.rs | 86 ++++- .../models/storage_verification_request.rs | 2 +- core/lib/dal/src/storage_logs_dal.rs | 65 +--- core/lib/dal/src/tokens_dal.rs | 2 +- .../types/src/contract_verification_api.rs | 14 +- .../contract_verification_server/Cargo.toml | 10 +- .../src/api_decl.rs | 11 +- .../src/api_impl.rs | 247 +++++++----- .../contract_verification_server/src/cache.rs | 122 ++++++ .../contract_verification_server/src/lib.rs | 9 +- .../contract_verification_server/src/tests.rs | 356 ++++++++++++++++++ .../layers/contract_verification_api.rs | 2 +- 24 files changed, 923 insertions(+), 298 deletions(-) create mode 100644 core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql create mode 100644 core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql create mode 100644 core/node/contract_verification_server/src/cache.rs create mode 100644 core/node/contract_verification_server/src/tests.rs diff --git a/Cargo.lock b/Cargo.lock index 65ae365e3a2b..04a863448d69 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10784,15 +10784,18 @@ version = "0.1.0" dependencies = [ "anyhow", "axum 0.7.7", - "serde", + "http-body-util", "serde_json", + "test-casing", "tokio", + "tower 0.4.13", "tower-http", "tracing", "vise", - "zksync_config", "zksync_dal", + "zksync_node_test_utils", "zksync_types", + "zksync_utils", ] [[package]] @@ -10800,16 +10803,13 @@ name = 
"zksync_contract_verifier" version = "0.1.0" dependencies = [ "anyhow", - "ctrlc", - "futures 0.3.31", - "structopt", + "clap 4.5.20", "tokio", "tracing", "zksync_config", "zksync_contract_verifier_lib", "zksync_core_leftovers", "zksync_dal", - "zksync_env_config", "zksync_queued_job_processor", "zksync_utils", "zksync_vlog", diff --git a/Cargo.toml b/Cargo.toml index 87e0de13129f..e491c64605bc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -130,6 +130,7 @@ google-cloud-storage = "0.20.0" governor = "0.4.2" hex = "0.4" http = "1.1" +http-body-util = "0.1.2" httpmock = "0.7.0" hyper = "1.3" insta = "1.29.0" diff --git a/core/bin/contract-verifier/Cargo.toml b/core/bin/contract-verifier/Cargo.toml index f088c2337e71..5e9a9efc6e7e 100644 --- a/core/bin/contract-verifier/Cargo.toml +++ b/core/bin/contract-verifier/Cargo.toml @@ -12,7 +12,6 @@ publish = false [dependencies] zksync_dal.workspace = true -zksync_env_config.workspace = true zksync_config = { workspace = true, features = ["observability_ext"] } zksync_contract_verifier_lib.workspace = true zksync_queued_job_processor.workspace = true @@ -21,8 +20,6 @@ zksync_vlog.workspace = true zksync_core_leftovers.workspace = true anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["full"] } -futures.workspace = true -ctrlc.workspace = true -structopt.workspace = true tracing.workspace = true diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs index 6929f8bfe04d..88f25256c40d 100644 --- a/core/bin/contract-verifier/src/main.rs +++ b/core/bin/contract-verifier/src/main.rs @@ -1,8 +1,7 @@ -use std::{cell::RefCell, time::Duration}; +use std::{path::PathBuf, time::Duration}; -use anyhow::Context; -use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt}; -use structopt::StructOpt; +use anyhow::Context as _; +use clap::Parser; use tokio::sync::watch; use zksync_config::configs::PrometheusConfig; use 
zksync_contract_verifier_lib::ContractVerifier; @@ -12,27 +11,31 @@ use zksync_queued_job_processor::JobProcessor; use zksync_utils::wait_for_tasks::ManagedTasks; use zksync_vlog::prometheus::PrometheusExporterConfig; -#[derive(StructOpt)] -#[structopt(name = "ZKsync contract code verifier", author = "Matter Labs")] +#[derive(Debug, Parser)] +#[command(name = "ZKsync contract code verifier", author = "Matter Labs")] struct Opt { /// Number of jobs to process. If None, runs indefinitely. - #[structopt(long)] + #[arg(long)] jobs_number: Option, /// Path to the configuration file. - #[structopt(long)] - config_path: Option, + #[arg(long)] + config_path: Option, /// Path to the secrets file. - #[structopt(long)] - secrets_path: Option, + #[arg(long)] + secrets_path: Option, } #[tokio::main] async fn main() -> anyhow::Result<()> { - let opt = Opt::from_args(); + let opt = Opt::parse(); let general_config = load_general_config(opt.config_path).context("general config")?; - let database_secrets = load_database_secrets(opt.secrets_path).context("database secrets")?; + let observability_config = general_config + .observability + .context("ObservabilityConfig")?; + let _observability_guard = observability_config.install()?; + let database_secrets = load_database_secrets(opt.secrets_path).context("database secrets")?; let verifier_config = general_config .contract_verifier .context("ContractVerifierConfig")?; @@ -46,33 +49,13 @@ async fn main() -> anyhow::Result<()> { .context("Master DB URL is absent")?, ) .build() - .await - .unwrap(); - - let observability_config = general_config - .observability - .context("ObservabilityConfig")?; - - let _observability_guard = observability_config.install()?; + .await?; let (stop_sender, stop_receiver) = watch::channel(false); - let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); - { - let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); - ctrlc::set_handler(move || { - let mut sender = 
stop_signal_sender.borrow_mut(); - block_on(sender.send(true)).expect("Ctrl+C signal send"); - }) - .expect("Error setting Ctrl+C handler"); - } - let contract_verifier = ContractVerifier::new(verifier_config.compilation_timeout(), pool) .await .context("failed initializing contract verifier")?; let tasks = vec![ - // TODO PLA-335: Leftovers after the prover DB split. - // The prover connection pool is not used by the contract verifier, but we need to pass it - // since `JobProcessor` trait requires it. tokio::spawn(contract_verifier.run(stop_receiver.clone(), opt.jobs_number)), tokio::spawn( PrometheusExporterConfig::pull(prometheus_config.listener_port).run(stop_receiver), @@ -82,7 +65,7 @@ async fn main() -> anyhow::Result<()> { let mut tasks = ManagedTasks::new(tasks); tokio::select! { () = tasks.wait_single() => {}, - _ = stop_signal_receiver.next() => { + _ = tokio::signal::ctrl_c() => { tracing::info!("Stop signal received, shutting down"); }, }; diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 425440fa2eb6..686bb0d7bdc3 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -14,7 +14,7 @@ use zksync_dal::{contract_verification_dal::DeployedContractData, ConnectionPool use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ contract_verification_api::{ - CompilationArtifacts, CompilerType, VerificationIncomingRequest, VerificationInfo, + self as api, CompilationArtifacts, VerificationIncomingRequest, VerificationInfo, VerificationRequest, }, Address, CONTRACT_DEPLOYER_ADDRESS, @@ -35,6 +35,65 @@ mod resolver; #[cfg(test)] mod tests; +#[derive(Debug)] +struct ZkCompilerVersions { + /// Version of the base / non-ZK compiler. + pub base: String, + /// Version of the ZK compiler. + pub zk: String, +} + +/// Internal counterpart of `ContractVersions` from API that encompasses all supported compilation modes. 
+#[derive(Debug)] +enum VersionedCompiler { + Solc(String), + #[allow(dead_code)] // TODO (EVM-864): add vyper support + Vyper(String), + ZkSolc(ZkCompilerVersions), + ZkVyper(ZkCompilerVersions), +} + +impl From for VersionedCompiler { + fn from(versions: api::CompilerVersions) -> Self { + match versions { + api::CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version: None, + } => Self::Solc(compiler_solc_version), + + api::CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version: Some(zk), + } => Self::ZkSolc(ZkCompilerVersions { + base: compiler_solc_version, + zk, + }), + + api::CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version: None, + } => Self::Vyper(compiler_vyper_version), + + api::CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version: Some(zk), + } => Self::ZkVyper(ZkCompilerVersions { + base: compiler_vyper_version, + zk, + }), + } + } +} + +impl VersionedCompiler { + fn expected_bytecode_kind(&self) -> BytecodeMarker { + match self { + Self::Solc(_) | Self::Vyper(_) => BytecodeMarker::Evm, + Self::ZkSolc(_) | Self::ZkVyper(_) => BytecodeMarker::EraVm, + } + } +} + enum ConstructorArgs { Check(Vec), Ignore, @@ -112,19 +171,19 @@ impl ContractVerifier { let mut transaction = storage.start_transaction().await?; transaction .contract_verification_dal() - .set_zksolc_versions(supported_versions.zksolc) + .set_zksolc_versions(&supported_versions.zksolc) .await?; transaction .contract_verification_dal() - .set_solc_versions(supported_versions.solc) + .set_solc_versions(&supported_versions.solc) .await?; transaction .contract_verification_dal() - .set_zkvyper_versions(supported_versions.zkvyper) + .set_zkvyper_versions(&supported_versions.zkvyper) .await?; transaction .contract_verification_dal() - .set_vyper_versions(supported_versions.vyper) + .set_vyper_versions(&supported_versions.vyper) .await?; transaction.commit().await?; Ok(()) @@ -214,13 +273,11 @@ impl 
ContractVerifier { async fn compile_zksolc( &self, + version: &ZkCompilerVersions, req: VerificationIncomingRequest, ) -> Result { - let zksolc = self - .compiler_resolver - .resolve_zksolc(&req.compiler_versions) - .await?; - tracing::debug!(?zksolc, ?req.compiler_versions, "resolved compiler"); + let zksolc = self.compiler_resolver.resolve_zksolc(version).await?; + tracing::debug!(?zksolc, ?version, "resolved compiler"); let input = ZkSolc::build_input(req)?; time::timeout(self.compilation_timeout, zksolc.compile(input)) @@ -230,13 +287,11 @@ impl ContractVerifier { async fn compile_zkvyper( &self, + version: &ZkCompilerVersions, req: VerificationIncomingRequest, ) -> Result { - let zkvyper = self - .compiler_resolver - .resolve_zkvyper(&req.compiler_versions) - .await?; - tracing::debug!(?zkvyper, ?req.compiler_versions, "resolved compiler"); + let zkvyper = self.compiler_resolver.resolve_zkvyper(version).await?; + tracing::debug!(?zkvyper, ?version, "resolved compiler"); let input = ZkVyper::build_input(req)?; time::timeout(self.compilation_timeout, zkvyper.compile(input)) .await @@ -245,12 +300,10 @@ impl ContractVerifier { async fn compile_solc( &self, + version: &str, req: VerificationIncomingRequest, ) -> Result { - let solc = self - .compiler_resolver - .resolve_solc(req.compiler_versions.compiler_version()) - .await?; + let solc = self.compiler_resolver.resolve_solc(version).await?; tracing::debug!(?solc, ?req.compiler_versions, "resolved compiler"); let input = Solc::build_input(req)?; @@ -276,15 +329,24 @@ impl ContractVerifier { return Err(err.into()); } - match (bytecode_marker, compiler_type) { - (BytecodeMarker::EraVm, CompilerType::Solc) => self.compile_zksolc(req).await, - (BytecodeMarker::EraVm, CompilerType::Vyper) => self.compile_zkvyper(req).await, - (BytecodeMarker::Evm, CompilerType::Solc) => self.compile_solc(req).await, - (BytecodeMarker::Evm, CompilerType::Vyper) => { - // TODO: add vyper support + let compiler = 
VersionedCompiler::from(req.compiler_versions.clone()); + if compiler.expected_bytecode_kind() != bytecode_marker { + let err = anyhow::anyhow!( + "bytecode kind expected by compiler {compiler:?} differs from the actual bytecode kind \ + of the verified contract ({bytecode_marker:?})", + ); + return Err(err.into()); + } + + match &compiler { + VersionedCompiler::Solc(version) => self.compile_solc(version, req).await, + VersionedCompiler::Vyper(_) => { + // TODO (EVM-864): add vyper support let err = anyhow::anyhow!("vyper toolchain is not yet supported for EVM contracts"); return Err(err.into()); } + VersionedCompiler::ZkSolc(version) => self.compile_zksolc(version, req).await, + VersionedCompiler::ZkVyper(version) => self.compile_zkvyper(version, req).await, } } diff --git a/core/lib/contract_verifier/src/resolver.rs b/core/lib/contract_verifier/src/resolver.rs index 347db8fff094..34a70b759797 100644 --- a/core/lib/contract_verifier/src/resolver.rs +++ b/core/lib/contract_verifier/src/resolver.rs @@ -6,12 +6,13 @@ use std::{ use anyhow::Context as _; use tokio::fs; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{CompilationArtifacts, CompilerVersions}; +use zksync_types::contract_verification_api::CompilationArtifacts; use zksync_utils::env::Workspace; use crate::{ compilers::{Solc, SolcInput, ZkSolc, ZkSolcInput, ZkVyper, ZkVyperInput}, error::ContractVerifierError, + ZkCompilerVersions, }; #[derive(Debug, Clone, Copy)] @@ -111,13 +112,13 @@ pub(crate) trait CompilerResolver: fmt::Debug + Send + Sync { /// Resolves a `zksolc` compiler. async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError>; /// Resolves a `zkvyper` compiler. 
async fn resolve_zkvyper( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError>; } @@ -198,14 +199,14 @@ impl CompilerResolver for EnvCompilerResolver { async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { - let zksolc_version = versions.zk_compiler_version(); + let zksolc_version = &version.zk; let zksolc_path = CompilerType::ZkSolc .bin_path(&self.home_dir, zksolc_version) .await?; let solc_path = CompilerType::Solc - .bin_path(&self.home_dir, versions.compiler_version()) + .bin_path(&self.home_dir, &version.base) .await?; let compiler_paths = CompilerPaths { base: solc_path, @@ -219,13 +220,13 @@ impl CompilerResolver for EnvCompilerResolver { async fn resolve_zkvyper( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { let zkvyper_path = CompilerType::ZkVyper - .bin_path(&self.home_dir, versions.zk_compiler_version()) + .bin_path(&self.home_dir, &version.zk) .await?; let vyper_path = CompilerType::Vyper - .bin_path(&self.home_dir, versions.compiler_version()) + .bin_path(&self.home_dir, &version.base) .await?; let compiler_paths = CompilerPaths { base: vyper_path, diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index f05d3155a6d4..7caa5f32c991 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -280,18 +280,18 @@ impl CompilerResolver for MockCompilerResolver { async fn resolve_zksolc( &self, - versions: &CompilerVersions, + version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { - if versions.compiler_version() != SOLC_VERSION { + if version.base != SOLC_VERSION { return Err(ContractVerifierError::UnknownCompilerVersion( "solc", - versions.compiler_version().to_owned(), + version.base.clone(), )); } - if versions.zk_compiler_version() != 
ZKSOLC_VERSION { + if version.zk != ZKSOLC_VERSION { return Err(ContractVerifierError::UnknownCompilerVersion( "zksolc", - versions.zk_compiler_version().to_owned(), + version.zk.clone(), )); } Ok(Box::new(self.clone())) @@ -299,7 +299,7 @@ impl CompilerResolver for MockCompilerResolver { async fn resolve_zkvyper( &self, - _versions: &CompilerVersions, + _version: &ZkCompilerVersions, ) -> Result>, ContractVerifierError> { unreachable!("not tested") } @@ -311,7 +311,7 @@ fn test_request(address: Address, source: &str) -> VerificationIncomingRequest { source_code_data: SourceCodeData::SolSingleFile(source.into()), contract_name: "Counter".to_owned(), compiler_versions: CompilerVersions::Solc { - compiler_zksolc_version: ZKSOLC_VERSION.to_owned(), + compiler_zksolc_version: Some(ZKSOLC_VERSION.to_owned()), compiler_solc_version: SOLC_VERSION.to_owned(), }, optimization_used: true, @@ -375,7 +375,7 @@ async fn contract_verifier_basics(contract: TestContract) { req.constructor_arguments = ethabi::encode(contract.constructor_args()).into(); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -468,10 +468,14 @@ async fn verifying_evm_bytecode(contract: TestContract) { ) .await; let mut req = test_request(address, contract.source()); + req.compiler_versions = CompilerVersions::Solc { + compiler_solc_version: SOLC_VERSION.to_owned(), + compiler_zksolc_version: None, + }; req.constructor_arguments = ethabi::encode(contract.constructor_args()).into(); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -513,7 +517,7 @@ async fn bytecode_mismatch_error() { let req = test_request(address, COUNTER_CONTRACT); let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ 
-578,6 +582,13 @@ async fn args_mismatch_error(contract: TestContract, bytecode_kind: BytecodeMark } let mut req = test_request(address, contract.source()); + if matches!(bytecode_kind, BytecodeMarker::Evm) { + req.compiler_versions = CompilerVersions::Solc { + compiler_zksolc_version: None, + compiler_solc_version: SOLC_VERSION.to_owned(), + }; + } + // Intentionally encode incorrect constructor args req.constructor_arguments = match contract { TestContract::Counter => ethabi::encode(&[Token::Bool(true)]).into(), @@ -585,7 +596,7 @@ async fn args_mismatch_error(contract: TestContract, bytecode_kind: BytecodeMark }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -648,10 +659,14 @@ async fn creation_bytecode_mismatch() { &[], ) .await; - let req = test_request(address, COUNTER_CONTRACT); + let mut req = test_request(address, COUNTER_CONTRACT); + req.compiler_versions = CompilerVersions::Solc { + compiler_zksolc_version: None, + compiler_solc_version: SOLC_VERSION.to_owned(), + }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -696,14 +711,14 @@ async fn no_compiler_version() { mock_deployment(&mut storage, address, vec![0xff; 32], &[]).await; let req = VerificationIncomingRequest { compiler_versions: CompilerVersions::Solc { - compiler_zksolc_version: ZKSOLC_VERSION.to_owned(), + compiler_zksolc_version: Some(ZKSOLC_VERSION.to_owned()), compiler_solc_version: "1.0.0".to_owned(), // a man can dream }, ..test_request(address, COUNTER_CONTRACT) }; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); diff --git a/core/lib/contract_verifier/src/tests/real.rs b/core/lib/contract_verifier/src/tests/real.rs index 5f550a5feea8..a7113044b405 100644 --- 
a/core/lib/contract_verifier/src/tests/real.rs +++ b/core/lib/contract_verifier/src/tests/real.rs @@ -8,7 +8,7 @@ use zksync_utils::bytecode::validate_bytecode; use super::*; -#[derive(Debug)] +#[derive(Debug, Clone)] struct TestCompilerVersions { solc: String, zksolc: String, @@ -26,10 +26,20 @@ impl TestCompilerVersions { }) } - fn for_zksolc(self) -> CompilerVersions { + fn zksolc(self) -> ZkCompilerVersions { + ZkCompilerVersions { + base: self.solc, + zk: self.zksolc, + } + } + + fn solc_for_api(self, bytecode_kind: BytecodeMarker) -> CompilerVersions { CompilerVersions::Solc { compiler_solc_version: self.solc, - compiler_zksolc_version: self.zksolc, + compiler_zksolc_version: match bytecode_kind { + BytecodeMarker::Evm => None, + BytecodeMarker::EraVm => Some(self.zksolc), + }, } } } @@ -70,10 +80,12 @@ macro_rules! real_resolver { async fn using_real_compiler() { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = supported_compilers.for_zksolc(); - let compiler = compiler_resolver.resolve_zksolc(&versions).await.unwrap(); + let compiler = compiler_resolver + .resolve_zksolc(&supported_compilers.clone().zksolc()) + .await + .unwrap(); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; let input = ZkSolc::build_input(req).unwrap(); @@ -92,7 +104,7 @@ async fn using_standalone_solc() { let req = VerificationIncomingRequest { compiler_versions: CompilerVersions::Solc { compiler_solc_version: version.clone(), - compiler_zksolc_version: "1000.0.0".to_owned(), // not used + compiler_zksolc_version: None, }, ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; @@ -108,23 +120,22 @@ async fn using_standalone_solc() { async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = 
supported_compilers.for_zksolc(); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; let address = Address::repeat_byte(1); let output = match bytecode_kind { BytecodeMarker::EraVm => { let compiler = compiler_resolver - .resolve_zksolc(&req.compiler_versions) + .resolve_zksolc(&supported_compilers.zksolc()) .await .unwrap(); let input = ZkSolc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() } BytecodeMarker::Evm => { - let solc_version = req.compiler_versions.compiler_version(); + let solc_version = &supported_compilers.solc; let compiler = compiler_resolver.resolve_solc(solc_version).await.unwrap(); let input = Solc::build_input(req.clone()).unwrap(); compiler.compile(input).await.unwrap() @@ -151,7 +162,7 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { } let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); @@ -174,10 +185,9 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) { async fn compilation_errors(bytecode_kind: BytecodeMarker) { let (compiler_resolver, supported_compilers) = real_resolver!(); - let versions = supported_compilers.for_zksolc(); let address = Address::repeat_byte(1); let req = VerificationIncomingRequest { - compiler_versions: versions, + compiler_versions: supported_compilers.solc_for_api(bytecode_kind), source_code_data: SourceCodeData::SolSingleFile("contract ???".to_owned()), ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) }; @@ -196,7 +206,7 @@ async fn compilation_errors(bytecode_kind: BytecodeMarker) { let request_id = storage .contract_verification_dal() - .add_contract_verification_request(req) + .add_contract_verification_request(&req) .await .unwrap(); diff --git 
a/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json b/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json index 1d515edba819..0db6ba6f51b6 100644 --- a/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json +++ b/core/lib/dal/.sqlx/query-2fa2ba4a62f79d780d239409d426b602aa0cf9b0c5b1ef39b7d07d6309454fcd.json @@ -69,7 +69,7 @@ false, false, false, - false, + true, false, false, true, diff --git a/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json b/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json index ebe8ce232cfb..ac7989a5be77 100644 --- a/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json +++ b/core/lib/dal/.sqlx/query-a115f795672787fe25bfaa8fd345094de508af93f4085be7cf3b54b1e8ecdadd.json @@ -67,7 +67,7 @@ false, false, false, - false, + true, false, false, true, diff --git a/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql new file mode 100644 index 000000000000..2693a565fd02 --- /dev/null +++ b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ALTER COLUMN zk_compiler_version SET NOT NULL; diff --git a/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql new file mode 100644 index 000000000000..92a689956f55 --- /dev/null +++ b/core/lib/dal/migrations/20241106093512_make_zk_compiler_version_nullable.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ALTER COLUMN zk_compiler_version DROP NOT NULL; diff --git a/core/lib/dal/src/contract_verification_dal.rs 
b/core/lib/dal/src/contract_verification_dal.rs index 1a827545ca13..93a4ce2fd35a 100644 --- a/core/lib/dal/src/contract_verification_dal.rs +++ b/core/lib/dal/src/contract_verification_dal.rs @@ -76,7 +76,7 @@ impl ContractVerificationDal<'_, '_> { pub async fn add_contract_verification_request( &mut self, - query: VerificationIncomingRequest, + query: &VerificationIncomingRequest, ) -> DalResult { sqlx::query!( r#" @@ -104,12 +104,12 @@ impl ContractVerificationDal<'_, '_> { query.contract_address.as_bytes(), // Serialization should always succeed. serde_json::to_string(&query.source_code_data).unwrap(), - query.contract_name, + &query.contract_name, query.compiler_versions.zk_compiler_version(), query.compiler_versions.compiler_version(), query.optimization_used, - query.optimizer_mode, - query.constructor_arguments.0, + query.optimizer_mode.as_deref(), + query.constructor_arguments.0.as_slice(), query.is_system, query.force_evmla, ) @@ -441,7 +441,7 @@ impl ContractVerificationDal<'_, '_> { async fn set_compiler_versions( &mut self, compiler: Compiler, - versions: Vec, + versions: &[String], ) -> DalResult<()> { let mut transaction = self.storage.start_transaction().await?; let compiler = format!("{compiler}"); @@ -472,7 +472,7 @@ impl ContractVerificationDal<'_, '_> { UNNEST($1::TEXT []) AS u (version) ON CONFLICT (version, compiler) DO NOTHING "#, - &versions, + versions, &compiler, ) .instrument("set_compiler_versions#insert") @@ -484,20 +484,20 @@ impl ContractVerificationDal<'_, '_> { transaction.commit().await } - pub async fn set_zksolc_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_zksolc_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::ZkSolc, versions).await } - pub async fn set_solc_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_solc_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::Solc, versions).await } 
- pub async fn set_zkvyper_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_zkvyper_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::ZkVyper, versions) .await } - pub async fn set_vyper_versions(&mut self, versions: Vec) -> DalResult<()> { + pub async fn set_vyper_versions(&mut self, versions: &[String]) -> DalResult<()> { self.set_compiler_versions(Compiler::Vyper, versions).await } @@ -567,7 +567,9 @@ mod tests { use std::collections::HashMap; use zksync_types::{ - tx::IncludedTxLocation, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, + contract_verification_api::{CompilerVersions, SourceCodeData}, + tx::IncludedTxLocation, + Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, }; use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics}; @@ -645,4 +647,66 @@ mod tests { assert_eq!(contract.contract_address, Some(CONTRACT_DEPLOYER_ADDRESS)); assert_eq!(contract.calldata.unwrap(), tx.execute.calldata); } + + async fn test_working_with_verification_requests(zksolc: Option<&str>) { + let request = VerificationIncomingRequest { + contract_address: Address::repeat_byte(11), + source_code_data: SourceCodeData::SolSingleFile("contract Test {}".to_owned()), + contract_name: "Test".to_string(), + compiler_versions: CompilerVersions::Solc { + compiler_zksolc_version: zksolc.map(str::to_owned), + compiler_solc_version: "0.8.27".to_owned(), + }, + optimization_used: true, + optimizer_mode: Some("z".to_owned()), + constructor_arguments: web3::Bytes(b"test".to_vec()), + is_system: false, + force_evmla: true, + }; + + let pool = ConnectionPool::::test_pool().await; + let mut conn = pool.connection().await.unwrap(); + let id = conn + .contract_verification_dal() + .add_contract_verification_request(&request) + .await + .unwrap(); + + let status = conn + .contract_verification_dal() + .get_verification_request_status(id) + .await + 
.unwrap() + .expect("request not persisted"); + assert_eq!(status.status, "queued"); + + let req = conn + .contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap() + .expect("request not queued"); + assert_eq!(req.id, id); + assert_eq!(req.req.contract_address, request.contract_address); + assert_eq!(req.req.contract_name, request.contract_name); + assert_eq!(req.req.compiler_versions, request.compiler_versions); + assert_eq!(req.req.optimization_used, request.optimization_used); + assert_eq!(req.req.optimizer_mode, request.optimizer_mode); + assert_eq!(req.req.constructor_arguments, request.constructor_arguments); + assert_eq!(req.req.is_system, request.is_system); + assert_eq!(req.req.force_evmla, request.force_evmla); + + let maybe_req = conn + .contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap(); + assert!(maybe_req.is_none()); + } + + #[tokio::test] + async fn working_with_verification_requests() { + test_working_with_verification_requests(None).await; + test_working_with_verification_requests(Some("1.5.7")).await; + } } diff --git a/core/lib/dal/src/models/storage_verification_request.rs b/core/lib/dal/src/models/storage_verification_request.rs index 61895fab76d3..ae4718e41290 100644 --- a/core/lib/dal/src/models/storage_verification_request.rs +++ b/core/lib/dal/src/models/storage_verification_request.rs @@ -12,7 +12,7 @@ pub struct StorageVerificationRequest { pub contract_address: Vec, pub source_code: String, pub contract_name: String, - pub zk_compiler_version: String, + pub zk_compiler_version: Option, pub compiler_version: String, pub optimization_used: bool, pub optimizer_mode: Option, diff --git a/core/lib/dal/src/storage_logs_dal.rs b/core/lib/dal/src/storage_logs_dal.rs index ced8f594add3..1675d76643c2 100644 --- a/core/lib/dal/src/storage_logs_dal.rs +++ b/core/lib/dal/src/storage_logs_dal.rs @@ -225,60 +225,13 @@ impl 
StorageLogsDal<'_, '_> { Ok(()) } - pub async fn is_contract_deployed_at_address(&mut self, address: Address) -> bool { - let hashed_key = get_code_key(&address).hashed_key(); - let row = sqlx::query!( - r#" - SELECT - COUNT(*) AS "count!" - FROM - ( - SELECT - * - FROM - storage_logs - WHERE - hashed_key = $1 - AND miniblock_number <= COALESCE( - ( - SELECT - MAX(number) - FROM - miniblocks - ), - ( - SELECT - miniblock_number - FROM - snapshot_recovery - ) - ) - ORDER BY - miniblock_number DESC, - operation_number DESC - LIMIT - 1 - ) sl - WHERE - sl.value != $2 - "#, - hashed_key.as_bytes(), - FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), - ) - .fetch_one(self.storage.conn()) - .await - .unwrap(); - - row.count > 0 - } - /// Returns addresses and the corresponding deployment L2 block numbers among the specified contract /// `addresses`. `at_l2_block` allows filtering deployment by L2 blocks. pub async fn filter_deployed_contracts( &mut self, addresses: impl Iterator, at_l2_block: Option, - ) -> DalResult> { + ) -> DalResult> { let (bytecode_hashed_keys, address_by_hashed_key): (Vec<_>, HashMap<_, _>) = addresses .map(|address| { let hashed_key = get_code_key(&address).hashed_key().0; @@ -330,12 +283,13 @@ impl StorageLogsDal<'_, '_> { .await?; let deployment_data = rows.into_iter().filter_map(|row| { - if row.value == FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() { + let bytecode_hash = H256::from_slice(&row.value); + if bytecode_hash == FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH { return None; } let l2_block_number = L2BlockNumber(row.miniblock_number as u32); let address = address_by_hashed_key[row.hashed_key.as_slice()]; - Some((address, l2_block_number)) + Some((address, (l2_block_number, bytecode_hash))) }); Ok(deployment_data.collect()) } @@ -1168,8 +1122,9 @@ mod tests { async fn filtering_deployed_contracts() { let contract_address = Address::repeat_byte(1); let other_contract_address = Address::repeat_byte(23); + let bytecode_hash = 
H256::repeat_byte(0xff); let successful_deployment = - StorageLog::new_write_log(get_code_key(&contract_address), H256::repeat_byte(0xff)); + StorageLog::new_write_log(get_code_key(&contract_address), bytecode_hash); let failed_deployment = StorageLog::new_write_log( get_code_key(&contract_address), FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, @@ -1233,7 +1188,7 @@ mod tests { .unwrap(); assert_eq!( deployed_map, - HashMap::from([(contract_address, L2BlockNumber(2))]) + HashMap::from([(contract_address, (L2BlockNumber(2), bytecode_hash))]) ); } @@ -1268,7 +1223,7 @@ mod tests { .unwrap(); assert_eq!( deployed_map, - HashMap::from([(contract_address, L2BlockNumber(2))]) + HashMap::from([(contract_address, (L2BlockNumber(2), bytecode_hash))]) ); for new_l2_block in [None, Some(L2BlockNumber(3))] { @@ -1283,8 +1238,8 @@ mod tests { assert_eq!( deployed_map, HashMap::from([ - (contract_address, L2BlockNumber(2)), - (other_contract_address, L2BlockNumber(3)), + (contract_address, (L2BlockNumber(2), bytecode_hash)), + (other_contract_address, (L2BlockNumber(3), bytecode_hash)), ]) ); } diff --git a/core/lib/dal/src/tokens_dal.rs b/core/lib/dal/src/tokens_dal.rs index 218e152fa82a..b5fd67fc63c8 100644 --- a/core/lib/dal/src/tokens_dal.rs +++ b/core/lib/dal/src/tokens_dal.rs @@ -98,7 +98,7 @@ impl TokensDal<'_, '_> { .filter_map(|address| { if address.is_zero() { None - } else if let Some(deployed_at) = token_deployment_data.get(&address) { + } else if let Some((deployed_at, _)) = token_deployment_data.get(&address) { (deployed_at > &block_number).then_some(address.0) } else { // Token belongs to a "pending" L2 block that's not yet fully inserted to the database. 
diff --git a/core/lib/types/src/contract_verification_api.rs b/core/lib/types/src/contract_verification_api.rs index fcaa1aa9a535..21e511549beb 100644 --- a/core/lib/types/src/contract_verification_api.rs +++ b/core/lib/types/src/contract_verification_api.rs @@ -152,17 +152,19 @@ pub enum CompilerType { Vyper, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(untagged)] pub enum CompilerVersions { #[serde(rename_all = "camelCase")] Solc { - compiler_zksolc_version: String, // FIXME: optional? + #[serde(default, skip_serializing_if = "Option::is_none")] + compiler_zksolc_version: Option, compiler_solc_version: String, }, #[serde(rename_all = "camelCase")] Vyper { - compiler_zkvyper_version: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + compiler_zkvyper_version: Option, compiler_vyper_version: String, }, } @@ -175,16 +177,16 @@ impl CompilerVersions { } } - pub fn zk_compiler_version(&self) -> &str { + pub fn zk_compiler_version(&self) -> Option<&str> { match self { Self::Solc { compiler_zksolc_version, .. - } => compiler_zksolc_version, + } => compiler_zksolc_version.as_deref(), Self::Vyper { compiler_zkvyper_version, .. 
- } => compiler_zkvyper_version, + } => compiler_zkvyper_version.as_deref(), } } diff --git a/core/node/contract_verification_server/Cargo.toml b/core/node/contract_verification_server/Cargo.toml index eeb2c7828467..038347debc64 100644 --- a/core/node/contract_verification_server/Cargo.toml +++ b/core/node/contract_verification_server/Cargo.toml @@ -11,9 +11,9 @@ keywords.workspace = true categories.workspace = true [dependencies] -zksync_config.workspace = true zksync_dal.workspace = true zksync_types.workspace = true +zksync_utils.workspace = true vise.workspace = true anyhow.workspace = true @@ -21,5 +21,11 @@ axum.workspace = true tokio = { workspace = true, features = ["time"] } tower-http = { workspace = true, features = ["cors"] } tracing.workspace = true -serde.workspace = true + +[dev-dependencies] +zksync_node_test_utils.workspace = true + +http-body-util.workspace = true serde_json.workspace = true +test-casing.workspace = true +tower.workspace = true diff --git a/core/node/contract_verification_server/src/api_decl.rs b/core/node/contract_verification_server/src/api_decl.rs index 256062936d32..d451cd79add9 100644 --- a/core/node/contract_verification_server/src/api_decl.rs +++ b/core/node/contract_verification_server/src/api_decl.rs @@ -3,10 +3,13 @@ use std::sync::Arc; use tower_http::cors::CorsLayer; use zksync_dal::{ConnectionPool, Core}; +use crate::cache::SupportedCompilersCache; + #[derive(Debug, Clone)] -pub struct RestApi { - pub(super) master_connection_pool: ConnectionPool, - pub(super) replica_connection_pool: ConnectionPool, +pub(crate) struct RestApi { + pub(crate) master_connection_pool: ConnectionPool, + pub(crate) replica_connection_pool: ConnectionPool, + pub(crate) supported_compilers: Arc, } impl RestApi { @@ -14,7 +17,9 @@ impl RestApi { master_connection_pool: ConnectionPool, replica_connection_pool: ConnectionPool, ) -> Self { + let supported_compilers = SupportedCompilersCache::new(replica_connection_pool.clone()); Self { + 
supported_compilers: Arc::new(supported_compilers), master_connection_pool, replica_connection_pool, } diff --git a/core/node/contract_verification_server/src/api_impl.rs b/core/node/contract_verification_server/src/api_impl.rs index b8111e98a1cc..94be65673bad 100644 --- a/core/node/contract_verification_server/src/api_impl.rs +++ b/core/node/contract_verification_server/src/api_impl.rs @@ -1,195 +1,234 @@ -use std::sync::Arc; +use std::{collections::HashSet, iter, sync::Arc}; +use anyhow::Context as _; use axum::{ extract::{Path, State}, - response::Response, + http::StatusCode, + response::{IntoResponse, Response}, Json, }; -use serde::Serialize; -use zksync_dal::CoreDal; -use zksync_types::{contract_verification_api::VerificationIncomingRequest, Address}; +use zksync_dal::{CoreDal, DalError}; +use zksync_types::{ + contract_verification_api::{ + CompilerVersions, VerificationIncomingRequest, VerificationInfo, VerificationRequestStatus, + }, + Address, +}; +use zksync_utils::bytecode::BytecodeMarker; use super::{api_decl::RestApi, metrics::METRICS}; -fn ok_json(data: impl Serialize) -> Response { - Response::builder() - .status(axum::http::StatusCode::OK) - .body(serde_json::to_string(&data).expect("Failed to serialize")) - .unwrap() +#[derive(Debug)] +pub(crate) enum ApiError { + IncorrectCompilerVersions, + UnsupportedCompilerVersions, + MissingZkCompilerVersion, + BogusZkCompilerVersion, + NoDeployedContract, + RequestNotFound, + VerificationInfoNotFound, + Internal(anyhow::Error), +} + +impl From for ApiError { + fn from(err: anyhow::Error) -> Self { + Self::Internal(err) + } +} + +impl From for ApiError { + fn from(err: DalError) -> Self { + Self::Internal(err.generalize()) + } } -fn bad_request(message: &str) -> Response { - Response::builder() - .status(axum::http::StatusCode::BAD_REQUEST) - .body(message.to_string()) - .unwrap() +impl ApiError { + pub fn message(&self) -> &'static str { + match self { + Self::IncorrectCompilerVersions => "incorrect 
compiler versions", + Self::UnsupportedCompilerVersions => "unsupported compiler versions", + Self::MissingZkCompilerVersion => "missing zk compiler version for EraVM bytecode", + Self::BogusZkCompilerVersion => "zk compiler version specified for EVM bytecode", + Self::NoDeployedContract => "There is no deployed contract on this address", + Self::RequestNotFound => "request not found", + Self::VerificationInfoNotFound => "verification info not found for address", + Self::Internal(_) => "internal server error", + } + } } -fn not_found() -> Response { - Response::builder() - .status(axum::http::StatusCode::NOT_FOUND) - .body(String::new()) - .unwrap() +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + let status_code = match &self { + Self::IncorrectCompilerVersions + | Self::UnsupportedCompilerVersions + | Self::MissingZkCompilerVersion + | Self::BogusZkCompilerVersion + | Self::NoDeployedContract => StatusCode::BAD_REQUEST, + + Self::RequestNotFound | Self::VerificationInfoNotFound => StatusCode::NOT_FOUND, + + Self::Internal(err) => { + // Do not expose the error details to the client, but log it. 
+ tracing::warn!("Internal error: {err:#}"); + StatusCode::INTERNAL_SERVER_ERROR + } + }; + (status_code, self.message()).into_response() + } } +type ApiResult = Result, ApiError>; + impl RestApi { #[tracing::instrument(skip(query))] fn validate_contract_verification_query( query: &VerificationIncomingRequest, - ) -> Result<(), Response> { + ) -> Result<(), ApiError> { if query.source_code_data.compiler_type() != query.compiler_versions.compiler_type() { - return Err(bad_request("incorrect compiler versions")); + return Err(ApiError::IncorrectCompilerVersions); } - Ok(()) } + fn validate_compilers( + versions: &CompilerVersions, + bytecode_kind: BytecodeMarker, + ) -> Result<(), ApiError> { + match bytecode_kind { + BytecodeMarker::EraVm if versions.zk_compiler_version().is_none() => { + Err(ApiError::MissingZkCompilerVersion) + } + BytecodeMarker::Evm if versions.zk_compiler_version().is_some() => { + Err(ApiError::BogusZkCompilerVersion) + } + _ => Ok(()), + } + } + /// Add a contract verification job to the queue if the requested contract wasn't previously verified. + // FIXME: this doesn't seem to check that the contract isn't verified; should it? 
#[tracing::instrument(skip(self_, request))] pub async fn verification( State(self_): State>, Json(request): Json, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification"].start(); - if let Err(res) = Self::validate_contract_verification_query(&request) { - return res; + Self::validate_contract_verification_query(&request)?; + + let is_compilation_supported = self_ + .supported_compilers + .get(|supported| supported.contain(&request.compiler_versions)) + .await?; + if !is_compilation_supported { + return Err(ApiError::UnsupportedCompilerVersions); } + let mut storage = self_ .master_connection_pool .connection_tagged("api") - .await - .unwrap(); - - if !storage + .await?; + let deployment_info = storage .storage_logs_dal() - .is_contract_deployed_at_address(request.contract_address) - .await - { - return bad_request("There is no deployed contract on this address"); - } + .filter_deployed_contracts(iter::once(request.contract_address), None) + .await?; + let &(_, bytecode_hash) = deployment_info + .get(&request.contract_address) + .ok_or(ApiError::NoDeployedContract)?; + let bytecode_marker = BytecodeMarker::new(bytecode_hash).with_context(|| { + format!( + "unknown bytecode marker for bytecode hash {bytecode_hash:?} at address {:?}", + request.contract_address + ) + })?; + Self::validate_compilers(&request.compiler_versions, bytecode_marker)?; let request_id = storage .contract_verification_dal() - .add_contract_verification_request(request) - .await - .unwrap(); - + .add_contract_verification_request(&request) + .await?; method_latency.observe(); - ok_json(request_id) + Ok(Json(request_id)) } #[tracing::instrument(skip(self_))] pub async fn verification_request_status( State(self_): State>, id: Path, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification_request_status"].start(); let status = self_ .replica_connection_pool .connection_tagged("api") - .await - .unwrap() + .await? 
.contract_verification_dal() .get_verification_request_status(*id) - .await - .unwrap(); + .await? + .ok_or(ApiError::RequestNotFound)?; method_latency.observe(); - match status { - Some(status) => ok_json(status), - None => not_found(), - } + Ok(Json(status)) } #[tracing::instrument(skip(self_))] - pub async fn zksolc_versions(State(self_): State>) -> Response { + pub async fn zksolc_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_zksolc_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_zksolc_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.zksolc.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn solc_versions(State(self_): State>) -> Response { + pub async fn solc_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_solc_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_solc_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.solc.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn zkvyper_versions(State(self_): State>) -> Response { + pub async fn zkvyper_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_zkvyper_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_zkvyper_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.zkvyper.clone()) + .await?; method_latency.observe(); - ok_json(versions) + 
Ok(Json(versions)) } #[tracing::instrument(skip(self_))] - pub async fn vyper_versions(State(self_): State>) -> Response { + pub async fn vyper_versions(State(self_): State>) -> ApiResult> { let method_latency = METRICS.call[&"contract_verification_vyper_versions"].start(); let versions = self_ - .replica_connection_pool - .connection_tagged("api") - .await - .unwrap() - .contract_verification_dal() - .get_vyper_versions() - .await - .unwrap(); - + .supported_compilers + .get(|supported| supported.vyper.clone()) + .await?; method_latency.observe(); - ok_json(versions) + Ok(Json(versions)) } #[tracing::instrument(skip(self_))] pub async fn verification_info( State(self_): State>, address: Path
, - ) -> Response { + ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification_info"].start(); - let info = self_ .replica_connection_pool .connection_tagged("api") - .await - .unwrap() + .await? .contract_verification_dal() .get_contract_verification_info(*address) - .await - .unwrap(); - + .await? + .ok_or(ApiError::VerificationInfoNotFound)?; method_latency.observe(); - match info { - Some(info) => ok_json(info), - None => not_found(), - } + Ok(Json(info)) } } diff --git a/core/node/contract_verification_server/src/cache.rs b/core/node/contract_verification_server/src/cache.rs new file mode 100644 index 000000000000..c8e367515287 --- /dev/null +++ b/core/node/contract_verification_server/src/cache.rs @@ -0,0 +1,122 @@ +use std::{ + collections::HashSet, + time::{Duration, Instant}, +}; + +use tokio::sync::RwLock; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; +use zksync_types::contract_verification_api::CompilerVersions; + +/// Compiler versions supported by the contract verifier. +#[derive(Debug, Clone)] +pub(crate) struct SupportedCompilerVersions { + pub solc: HashSet, + pub zksolc: HashSet, + pub vyper: HashSet, + pub zkvyper: HashSet, +} + +impl SupportedCompilerVersions { + /// Checks whether the supported compilers include ones specified in a request. 
+ pub fn contain(&self, versions: &CompilerVersions) -> bool { + match versions { + CompilerVersions::Solc { + compiler_solc_version, + compiler_zksolc_version, + } => { + self.solc.contains(compiler_solc_version) + && compiler_zksolc_version + .as_ref() + .map_or(true, |ver| self.zksolc.contains(ver)) + } + CompilerVersions::Vyper { + compiler_vyper_version, + compiler_zkvyper_version, + } => { + self.vyper.contains(compiler_vyper_version) + && compiler_zkvyper_version + .as_ref() + .map_or(true, |ver| self.zkvyper.contains(ver)) + } + } + } +} + +impl SupportedCompilerVersions { + async fn new(connection: &mut Connection<'_, Core>) -> Result { + let solc = connection + .contract_verification_dal() + .get_solc_versions() + .await?; + let zksolc = connection + .contract_verification_dal() + .get_zksolc_versions() + .await?; + let vyper = connection + .contract_verification_dal() + .get_vyper_versions() + .await?; + let zkvyper = connection + .contract_verification_dal() + .get_zkvyper_versions() + .await?; + Ok(Self { + solc: solc.into_iter().collect(), + zksolc: zksolc.into_iter().collect(), + vyper: vyper.into_iter().collect(), + zkvyper: zkvyper.into_iter().collect(), + }) + } +} + +/// Cache for compiler versions supported by the contract verifier. 
+#[derive(Debug)] +pub(crate) struct SupportedCompilersCache { + connection_pool: ConnectionPool, + inner: RwLock>, +} + +impl SupportedCompilersCache { + const CACHE_UPDATE_INTERVAL: Duration = Duration::from_secs(10); + + pub fn new(connection_pool: ConnectionPool) -> Self { + Self { + connection_pool, + inner: RwLock::new(None), + } + } + + fn get_cached( + cache: Option<&(SupportedCompilerVersions, Instant)>, + action: impl FnOnce(&SupportedCompilerVersions) -> R, + ) -> Option { + cache.and_then(|(versions, updated_at)| { + (updated_at.elapsed() <= Self::CACHE_UPDATE_INTERVAL).then(|| action(versions)) + }) + } + + pub async fn get( + &self, + action: impl Fn(&SupportedCompilerVersions) -> R, + ) -> Result { + let output = Self::get_cached(self.inner.read().await.as_ref(), &action); + if let Some(output) = output { + return Ok(output); + } + + // We don't want to hold an exclusive lock while querying Postgres. + let supported = { + let mut connection = self.connection_pool.connection_tagged("api").await?; + let mut db_transaction = connection + .transaction_builder()? + .set_readonly() + .build() + .await?; + SupportedCompilerVersions::new(&mut db_transaction).await? + }; + let output = action(&supported); + // Another task may have written to the cache already, but we should be fine with updating it again. 
+ *self.inner.write().await = Some((supported, Instant::now())); + Ok(output) + } +} diff --git a/core/node/contract_verification_server/src/lib.rs b/core/node/contract_verification_server/src/lib.rs index eea45f8564bf..912cec55f0b8 100644 --- a/core/node/contract_verification_server/src/lib.rs +++ b/core/node/contract_verification_server/src/lib.rs @@ -1,21 +1,24 @@ +use std::net::SocketAddr; + use anyhow::Context as _; use tokio::sync::watch; -use zksync_config::ContractVerifierConfig; use zksync_dal::ConnectionPool; use self::api_decl::RestApi; mod api_decl; mod api_impl; +mod cache; mod metrics; +#[cfg(test)] +mod tests; pub async fn start_server( master_connection_pool: ConnectionPool, replica_connection_pool: ConnectionPool, - config: ContractVerifierConfig, + bind_address: SocketAddr, mut stop_receiver: watch::Receiver, ) -> anyhow::Result<()> { - let bind_address = config.bind_addr(); let api = RestApi::new(master_connection_pool, replica_connection_pool).into_router(); let listener = tokio::net::TcpListener::bind(bind_address) diff --git a/core/node/contract_verification_server/src/tests.rs b/core/node/contract_verification_server/src/tests.rs new file mode 100644 index 000000000000..b7b0d3e8efb4 --- /dev/null +++ b/core/node/contract_verification_server/src/tests.rs @@ -0,0 +1,356 @@ +//! Tests for contract verification API server. 
+ +use std::{str, time::Duration}; + +use axum::{ + body::Body, + http::{header, Method, Request, Response, StatusCode}, +}; +use http_body_util::BodyExt as _; +use test_casing::test_casing; +use tower::ServiceExt; +use zksync_dal::{Connection, Core, CoreDal}; +use zksync_node_test_utils::create_l2_block; +use zksync_types::{ + contract_verification_api::CompilerVersions, get_code_key, Address, L2BlockNumber, + ProtocolVersion, StorageLog, +}; +use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode, BytecodeMarker}; + +use super::*; +use crate::api_impl::ApiError; + +const SOLC_VERSION: &str = "0.8.27"; +const ZKSOLC_VERSION: &str = "1.5.6"; + +async fn prepare_storage(storage: &mut Connection<'_, Core>) { + storage + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + storage + .blocks_dal() + .insert_l2_block(&create_l2_block(0)) + .await + .unwrap(); + + storage + .contract_verification_dal() + .set_solc_versions(&[SOLC_VERSION.to_owned()]) + .await + .unwrap(); + storage + .contract_verification_dal() + .set_zksolc_versions(&[ZKSOLC_VERSION.to_owned()]) + .await + .unwrap(); +} + +async fn mock_deploy_contract( + storage: &mut Connection<'_, Core>, + address: Address, + kind: BytecodeMarker, +) { + let bytecode_hash = match kind { + BytecodeMarker::EraVm => hash_bytecode(&[0; 32]), + BytecodeMarker::Evm => hash_evm_bytecode(&[0; 96]), + }; + let deploy_log = StorageLog::new_write_log(get_code_key(&address), bytecode_hash); + storage + .storage_logs_dal() + .append_storage_logs(L2BlockNumber(0), &[deploy_log]) + .await + .unwrap() +} + +fn post_request(body: &serde_json::Value) -> Request { + Request::builder() + .method(Method::POST) + .uri("/contract_verification") + .header(header::CONTENT_TYPE, "application/json") + .body(Body::from(serde_json::to_vec(body).unwrap())) + .unwrap() +} + +async fn json_response(response: Response) -> serde_json::Value { + assert_eq!(response.status(), 
StatusCode::OK); + assert_eq!( + response.headers().get(header::CONTENT_TYPE).unwrap(), + "application/json" + ); + let response = response.into_body(); + let response = response.collect().await.unwrap().to_bytes(); + serde_json::from_slice(&response).unwrap() +} + +#[tokio::test] +async fn getting_compiler_versions() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let router = RestApi::new(pool.clone(), pool).into_router(); + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/zksolc_versions") + .body(Body::empty()) + .unwrap(); + let response = router.clone().oneshot(req).await.unwrap(); + let versions = json_response(response).await; + assert_eq!(versions, serde_json::json!([ZKSOLC_VERSION])); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/solc_versions") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + let versions = json_response(response).await; + assert_eq!(versions, serde_json::json!([SOLC_VERSION])); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION), + BytecodeMarker::Evm => None, + }, + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .clone() + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + 
assert_eq!(response.status(), StatusCode::BAD_REQUEST); // the address is not deployed to + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::NoDeployedContract.message()); + + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let response = router + .clone() + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + let id = json_response(response).await; + assert_eq!(id, serde_json::json!(1)); + + let request = storage + .contract_verification_dal() + .get_next_queued_verification_request(Duration::from_secs(600)) + .await + .unwrap() + .expect("request not persisted"); + assert_eq!(request.id, 1); + assert_eq!(request.req.contract_address, address); + assert_eq!( + request.req.compiler_versions, + CompilerVersions::Solc { + compiler_zksolc_version: match bytecode_kind { + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION.to_owned()), + BytecodeMarker::Evm => None, + }, + compiler_solc_version: SOLC_VERSION.to_owned(), + } + ); + assert_eq!(request.req.contract_name, "Test"); + assert!(request.req.optimization_used); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/1") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + let request_status = json_response(response).await; + assert_eq!(request_status["status"], "in_progress"); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request_with_invalid_compiler_type(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": 
"contract Test {}", + "contractName": "Test", + // Intentionally incorrect versions "shape" + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::Evm => Some(ZKSOLC_VERSION), + BytecodeMarker::EraVm => None, + }, + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + let expected_message = match bytecode_kind { + BytecodeMarker::Evm => ApiError::BogusZkCompilerVersion.message(), + BytecodeMarker::EraVm => ApiError::MissingZkCompilerVersion.message(), + }; + assert_eq!(error_message, expected_message); +} + +#[test_casing(2, [BytecodeMarker::EraVm, BytecodeMarker::Evm])] +#[tokio::test] +async fn submitting_request_with_unsupported_solc(bytecode_kind: BytecodeMarker) { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, bytecode_kind).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": match bytecode_kind { + BytecodeMarker::Evm => None, + BytecodeMarker::EraVm => Some(ZKSOLC_VERSION), + }, + "compilerSolcVersion": "1.0.0", + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = 
str::from_utf8(&error_message).unwrap(); + assert_eq!( + error_message, + ApiError::UnsupportedCompilerVersions.message() + ); +} + +#[tokio::test] +async fn submitting_request_with_unsupported_zksolc() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, BytecodeMarker::EraVm).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerZksolcVersion": "1000.0.0", + "compilerSolcVersion": SOLC_VERSION, + "optimizationUsed": true, + }); + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!( + error_message, + ApiError::UnsupportedCompilerVersions.message() + ); +} + +#[tokio::test] +async fn querying_missing_request() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let router = RestApi::new(pool.clone(), pool).into_router(); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/1") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::RequestNotFound.message()); +} + +#[tokio::test] +async fn querying_missing_verification_info() { + let pool = ConnectionPool::test_pool().await; + let mut storage = 
pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let router = RestApi::new(pool.clone(), pool).into_router(); + + let req = Request::builder() + .method(Method::GET) + .uri("/contract_verification/info/0x2323232323232323232323232323232323232323") + .body(Body::empty()) + .unwrap(); + let response = router.oneshot(req).await.unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::VerificationInfoNotFound.message()); +} + +#[tokio::test] +async fn mismatched_compiler_type() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + let address = Address::repeat_byte(0x23); + mock_deploy_contract(&mut storage, address, BytecodeMarker::EraVm).await; + + let verification_request = serde_json::json!({ + "contractAddress": address, + "sourceCode": "contract Test {}", + "contractName": "Test", + "compilerVyperVersion": "1.0.1", + "optimizationUsed": true, + }); + + let router = RestApi::new(pool.clone(), pool).into_router(); + let response = router + .oneshot(post_request(&verification_request)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); + let error_message = response.collect().await.unwrap().to_bytes(); + let error_message = str::from_utf8(&error_message).unwrap(); + assert_eq!(error_message, ApiError::IncorrectCompilerVersions.message()); +} diff --git a/core/node/node_framework/src/implementations/layers/contract_verification_api.rs b/core/node/node_framework/src/implementations/layers/contract_verification_api.rs index 3f1f76cc1c12..2ca7cc25a1fd 100644 --- a/core/node/node_framework/src/implementations/layers/contract_verification_api.rs +++ b/core/node/node_framework/src/implementations/layers/contract_verification_api.rs @@ -69,7 +69,7 @@ impl 
Task for ContractVerificationApiTask { zksync_contract_verification_server::start_server( self.master_pool, self.replica_pool, - self.config, + self.config.bind_addr(), stop_receiver.0, ) .await From 1bfff0e007e2fb5a4b4b885cf5c69a5cd290888b Mon Sep 17 00:00:00 2001 From: Yury Akudovich Date: Fri, 8 Nov 2024 22:07:51 +0100 Subject: [PATCH 7/7] fix(prover): Remove unneeded dependencies, add default for graceful_shutdown_timeout (#3242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Remove unneeded dependencies. Add default for graceful_shutdown_timeout. ## Why ❔ ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [x] Code has been formatted via `zkstack dev fmt` and `zkstack dev lint`. ref ZKD-1855 --- prover/Cargo.lock | 2 -- prover/crates/bin/prover_autoscaler/Cargo.toml | 2 -- prover/crates/bin/prover_autoscaler/src/config.rs | 5 ++++- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 0a86a44f145d..f119d4bd1951 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -8395,9 +8395,7 @@ dependencies = [ "url", "vise", "zksync_config", - "zksync_core_leftovers", "zksync_prover_job_monitor", - "zksync_types", "zksync_utils", "zksync_vlog", ] diff --git a/prover/crates/bin/prover_autoscaler/Cargo.toml b/prover/crates/bin/prover_autoscaler/Cargo.toml index 88569aa87e94..4e66ecc2b0e3 100644 --- a/prover/crates/bin/prover_autoscaler/Cargo.toml +++ b/prover/crates/bin/prover_autoscaler/Cargo.toml @@ -10,10 +10,8 @@ keywords.workspace = true categories.workspace = true [dependencies] -zksync_core_leftovers.workspace = true zksync_vlog.workspace = true zksync_utils.workspace = true -zksync_types.workspace = true zksync_config = { workspace = true, features = ["observability_ext"] } 
zksync_prover_job_monitor.workspace = true diff --git a/prover/crates/bin/prover_autoscaler/src/config.rs b/prover/crates/bin/prover_autoscaler/src/config.rs index 6729a5372d56..777ffe89fc91 100644 --- a/prover/crates/bin/prover_autoscaler/src/config.rs +++ b/prover/crates/bin/prover_autoscaler/src/config.rs @@ -11,7 +11,10 @@ use zksync_config::configs::ObservabilityConfig; #[derive(Debug, Clone, PartialEq, Deserialize)] pub struct ProverAutoscalerConfig { /// Amount of time ProverJobMonitor will wait all it's tasks to finish. - #[serde(with = "humantime_serde")] + #[serde( + with = "humantime_serde", + default = "ProverAutoscalerConfig::default_graceful_shutdown_timeout" + )] pub graceful_shutdown_timeout: Duration, pub agent_config: Option, pub scaler_config: Option,