From 849c6a5dcd095e8fead0630a2a403f282c26a2aa Mon Sep 17 00:00:00 2001 From: EmilLuta Date: Thu, 8 Aug 2024 09:01:16 +0200 Subject: [PATCH 1/8] fix: Bump prover dependencies & rust toolchain (#2600) Signed-off-by: Harald Hoyer Co-authored-by: Igor Aleksanov Co-authored-by: Harald Hoyer --- Cargo.lock | 120 ++++----- Cargo.toml | 28 +- core/bin/external_node/src/config/mod.rs | 6 - core/bin/verified_sources_fetcher/src/main.rs | 4 +- core/lib/basic_types/src/basic_fri_types.rs | 1 + core/lib/config/src/configs/chain.rs | 8 +- .../src/packed_eth_signature.rs | 2 +- core/lib/dal/src/tee_proof_generation_dal.rs | 13 - .../src/tee_verifier_input_producer_dal.rs | 2 +- core/lib/merkle_tree/src/recovery/mod.rs | 6 +- core/lib/merkle_tree/src/storage/mod.rs | 10 +- core/lib/multivm/src/glue/tracers/mod.rs | 10 +- .../src/versions/vm_latest/oracles/storage.rs | 4 +- core/lib/types/src/fee_model.rs | 13 +- core/lib/types/src/storage/writes/mod.rs | 7 +- core/lib/vlog/src/logs/mod.rs | 5 +- .../src/tx_sender/master_pool_sink.rs | 5 +- core/node/vm_runner/src/storage.rs | 4 +- docker/build-base/Dockerfile | 4 +- docker/proof-fri-gpu-compressor/Dockerfile | 4 +- docker/prover-gpu-fri/Dockerfile | 4 +- .../20.04_amd64_cuda_11_8.Dockerfile | 2 +- .../20.04_amd64_cuda_12_0.Dockerfile | 2 +- docker/zk-environment/Dockerfile | 4 +- flake.lock | 30 +-- prover/Cargo.lock | 243 +++++++++--------- prover/Cargo.toml | 12 +- .../crates/bin/proof_fri_compressor/README.md | 2 +- .../src/utils.rs | 2 +- .../bin/witness_vector_generator/README.md | 2 +- .../src/fri_witness_generator_dal.rs | 1 + prover/rust-toolchain | 2 +- rust-toolchain | 2 +- zk_toolbox/Cargo.lock | 60 ++--- zk_toolbox/Cargo.toml | 2 +- 35 files changed, 290 insertions(+), 336 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 446bf4a8e956..8446831eca71 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -722,9 +722,9 @@ dependencies = [ [[package]] name = "boojum" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cf10f4b3980dc82dc31709dfa8193b7d6106a3a7ce9f9a9f8872bfb8719aa2d" +checksum = "df88daa33db46d683967ca09a4f04817c38950483f2501a771d497669a8a4bb1" dependencies = [ "arrayvec 0.7.4", "bincode", @@ -1045,14 +1045,14 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e59747066b9a0d1a15d45f5837658aec5d53744fb643954f9dcc412f76c0d346" +checksum = "c928cad0aeeb35e86f8605376fdbb27b506cfcec14af1f532459a47e34d8b6f9" dependencies = [ "derivative", "serde", "zk_evm 0.150.0", - "zkevm_circuits 0.150.1", + "zkevm_circuits 0.150.2", ] [[package]] @@ -1112,12 +1112,12 @@ dependencies = [ [[package]] name = "circuit_sequencer_api" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfeb50910b20c4f05cc51700b2396a655cef9e6f0c84debd71cb02ce4853902" +checksum = "18d8ca58b9bb7c63a75813c96a5a80314fd70013d7929f61fc0e9e69b0e440a7" dependencies = [ "bellman_ce", - "circuit_encodings 0.150.2-rc.2", + "circuit_encodings 0.150.2-rc.3", "derivative", "rayon", "serde", @@ -1578,9 +1578,9 @@ dependencies = [ [[package]] name = "cs_derive" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab1f510bfddd1fc643a1d1bf8a405e279ffc818ee7ac86ed658e667a44958178" +checksum = "24cf603ca4299c6e20e644da88897f7b81d688510f4887e818b0bfe0b792081b" 
dependencies = [ "proc-macro-error", "proc-macro2 1.0.86", @@ -1588,6 +1588,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ctor" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +dependencies = [ + "quote 1.0.36", + "syn 2.0.72", +] + [[package]] name = "ctr" version = "0.9.2" @@ -3499,26 +3509,6 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" -[[package]] -name = "linkme" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ed2ee9464ff9707af8e9ad834cffa4802f072caad90639c583dd3c62e6e608" -dependencies = [ - "linkme-impl", -] - -[[package]] -name = "linkme-impl" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba125974b109d512fccbc6c0244e7580143e460895dfd6ea7f8bbb692fd94396" -dependencies = [ - "proc-macro2 1.0.86", - "quote 1.0.36", - "syn 2.0.72", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -7238,13 +7228,13 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "vise" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229baafe01d5177b63c6ee1def80d8e39a2365e64caf69ddb05a57594b15647c" +checksum = "90ade36f3548b1524396f4de7b36f4f210c8a01dfab568eb2bff466af64eb6e5" dependencies = [ "compile-fmt", + "ctor", "elsa", - "linkme", "once_cell", "prometheus-client", "vise-macros", @@ -7252,9 +7242,9 @@ dependencies = [ [[package]] name = "vise-exporter" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23981b18d697026f5430249ab01ba739ef2edc463e400042394331cb2bb63494" +checksum = "671d3b894d5d0849f0a597f56bf071f42d4f2a1cbcf2f78ca21f870ab7c0cc2b" dependencies = [ "hyper 0.14.29", "once_cell", @@ -7265,9 +7255,9 @@ dependencies = [ [[package]] name = "vise-macros" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb19c33cd5f04dcf4e767635e058a998edbc2b7fca32ade0a4a1cea0f8e9b34" +checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ "proc-macro2 1.0.86", "quote 1.0.36", @@ -7957,9 +7947,9 @@ dependencies = [ [[package]] name = "zkevm_circuits" -version = "0.150.1" +version = "0.150.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a85c1987a1e7e89f1f8c39ca19bffb61521e719050086372aaea8817f403fc" +checksum = "94d97632ba26e4e6a77a680d6b2bfbcc6f7b9b722976ee31afb922d16a675d45" dependencies = [ "arrayvec 0.7.4", "boojum", @@ -8115,7 +8105,7 @@ dependencies = [ "anyhow", "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.1", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "futures 0.3.28", "itertools 0.10.5", "num_cpus", @@ -8142,9 +8132,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "209b83578357184ab72af4d4cb2eca76f85e5f2f35d739a47e3fd5931eb9252d" +checksum = "a9f9a4352244ccd5e5fd34fb0d029861a5f57b05c80fe7944a7b532f54c58f89" dependencies = [ "anyhow", "once_cell", @@ -8177,9 +8167,9 @@ dependencies = [ [[package]] name = "zksync_consensus_bft" -version = 
"0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7c7f1613bdb9d02b21d66ab60bdf6523456dcc5006290cd67702d3f729f549e" +checksum = "f69309c1c9e2c730b8858af2301cc8762280dab8b838b571524e4d43107aa7ff" dependencies = [ "anyhow", "async-trait", @@ -8199,9 +8189,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1abf1f3d9c8109da32a6d5e61a2a64a61b0bff90fdd355992522a4e8a57e69" +checksum = "f8c91270540e8db9479e1eaedaf0e600de468f71ccd5dc7c0258072e743830e6" dependencies = [ "anyhow", "blst", @@ -8223,9 +8213,9 @@ dependencies = [ [[package]] name = "zksync_consensus_executor" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8495b9056a895ee4e720b803d3e26ffad18776ae374805bab34a5ff5b648be6e" +checksum = "a05b45ae9c0bf45f4acc6833dca34907404d1ddd9041a5cd554751c2c5710764" dependencies = [ "anyhow", "async-trait", @@ -8244,9 +8234,9 @@ dependencies = [ [[package]] name = "zksync_consensus_network" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ec4a076c63c76599711a7dc28cdf3a7923b6bc7720bc572ea11e92fb2b526f" +checksum = "477abd01af60faa5afffbff651cbdf9d108bcae4f1326b508bc84063126d34f9" dependencies = [ "anyhow", "async-trait", @@ -8279,9 +8269,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e0b04d43a542a3bb1af0ac4c0a17acf6b743607c3cb9028192df0c7d2f5b24" +checksum = "87e79025fd678ec2733add1697645827e9daed3f120c8cebf43513ac17e65b63" dependencies = [ "anyhow", "bit-vec", @@ -8301,9 +8291,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c39f79628bd6685f7ec5561874c007f5d26693d6ba7e5595dfa260981e8f006" +checksum = "470991a42d5f9a3f2385ebe52889e63742d95d141b80b95a1eabe9f51e18cb7e" dependencies = [ "anyhow", "async-trait", @@ -8321,9 +8311,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4514629a34abdf943ef911c16228dfec656edb02d8412db4febd4df5ccf3f91" +checksum = "6c43283b5813fd887e0e7ccaee73c6e41907b1de311a3a01b2fa5f2e3f2ba503" dependencies = [ "anyhow", "rand 0.8.5", @@ -8767,9 +8757,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c44edd3a3316dcab45aab7e190c96150f2586d4a92fa21f93dcc20178308313a" +checksum = "9235fbdaa98f27b9aacaa861bcb850b6b0dbf37e59477ce3f08c64555a25d00d" dependencies = [ "boojum", "derivative", @@ -8779,7 +8769,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "zkevm_circuits 0.150.1", + "zkevm_circuits 0.150.2", ] [[package]] @@ -8891,7 +8881,7 @@ dependencies = [ "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.1", "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "ethabi", "hex", "itertools 0.10.5", @@ -9261,9 +9251,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = 
"0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53128384270314bfbd4e044c15138af63cb3a505ab95bb3339f3b866ccbe211c" +checksum = "b5db598a518958b244aed5e3f925c763808429a5ea022bb50957b98e68540495" dependencies = [ "anyhow", "bit-vec", @@ -9282,9 +9272,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d7dfb4dcdd48ab5fa1ccff25f585d73b58cf95e0fb74e96618dd666f198a005" +checksum = "4047ed624c7a19e206125f8259f7e175ad70020beeb66e1975e068af060d2fb5" dependencies = [ "anyhow", "heck 0.5.0", @@ -9332,7 +9322,7 @@ version = "0.1.0" dependencies = [ "bincode", "chrono", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "serde", "serde_json", "serde_with", diff --git a/Cargo.toml b/Cargo.toml index 4210911a2596..691ce1c4c3ff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -194,8 +194,8 @@ proc-macro2 = "1.0" trybuild = "1.0" # "Internal" dependencies -vise = "0.1.0" -vise-exporter = "0.1.0" +vise = "0.2.0" +vise-exporter = "0.2.0" # Here and below: # We *always* pin the latest version of protocol to disallow accidental changes in the execution logic. @@ -205,9 +205,9 @@ circuit_sequencer_api_1_3_3 = { package = "circuit_sequencer_api", version = "0. circuit_sequencer_api_1_4_0 = { package = "circuit_sequencer_api", version = "0.140" } circuit_sequencer_api_1_4_1 = { package = "circuit_sequencer_api", version = "0.141" } circuit_sequencer_api_1_4_2 = { package = "circuit_sequencer_api", version = "0.142" } -circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.2-rc.2" } +circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.2-rc.3" } crypto_codegen = { package = "zksync_solidity_vk_codegen", version = "=0.1.0" } -kzg = { package = "zksync_kzg", version = "=0.150.2-rc.2" } +kzg = { package = "zksync_kzg", version = "=0.150.2-rc.3" } zk_evm = { version = "=0.133.0" } zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } zk_evm_1_3_3 = { package = "zk_evm", version = "0.133.0" } @@ -216,16 +216,16 @@ zk_evm_1_4_1 = { package = "zk_evm", version = "0.141.0" } zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.0" } # Consensus dependencies. 
-zksync_concurrency = "=0.1.0-rc.8" -zksync_consensus_bft = "=0.1.0-rc.8" -zksync_consensus_crypto = "=0.1.0-rc.8" -zksync_consensus_executor = "=0.1.0-rc.8" -zksync_consensus_network = "=0.1.0-rc.8" -zksync_consensus_roles = "=0.1.0-rc.8" -zksync_consensus_storage = "=0.1.0-rc.8" -zksync_consensus_utils = "=0.1.0-rc.8" -zksync_protobuf = "=0.1.0-rc.8" -zksync_protobuf_build = "=0.1.0-rc.8" +zksync_concurrency = "=0.1.0-rc.9" +zksync_consensus_bft = "=0.1.0-rc.9" +zksync_consensus_crypto = "=0.1.0-rc.9" +zksync_consensus_executor = "=0.1.0-rc.9" +zksync_consensus_network = "=0.1.0-rc.9" +zksync_consensus_roles = "=0.1.0-rc.9" +zksync_consensus_storage = "=0.1.0-rc.9" +zksync_consensus_utils = "=0.1.0-rc.9" +zksync_protobuf = "=0.1.0-rc.9" +zksync_protobuf_build = "=0.1.0-rc.9" # "Local" dependencies zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index b0f0fa794fa1..80cfde02e5c6 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -223,12 +223,6 @@ impl RemoteENConfig { } } -#[derive(Debug, Deserialize)] -pub(crate) enum BlockFetcher { - ServerAPI, - Consensus, -} - /// This part of the external node config is completely optional to provide. /// It can tweak limits of the API, delay intervals of certain components, etc. /// If any of the fields are not provided, the default values will be used. diff --git a/core/bin/verified_sources_fetcher/src/main.rs b/core/bin/verified_sources_fetcher/src/main.rs index 51ec8e9de7d6..981eebf4a706 100644 --- a/core/bin/verified_sources_fetcher/src/main.rs +++ b/core/bin/verified_sources_fetcher/src/main.rs @@ -52,14 +52,14 @@ async fn main() { file.write_all(content.as_bytes()).unwrap(); } SourceCodeData::StandardJsonInput(input) => { - let sources = input.get(&"sources".to_string()).unwrap().clone(); + let sources = input.get("sources").unwrap().clone(); for (key, val) in sources.as_object().unwrap() { let p = format!("{}/{}", &dir, key); let path = std::path::Path::new(p.as_str()); let prefix = path.parent().unwrap(); std::fs::create_dir_all(prefix).unwrap(); let mut file = std::fs::File::create(path).unwrap(); - let content = val.get(&"content".to_string()).unwrap().as_str().unwrap(); + let content = val.get("content").unwrap().as_str().unwrap(); file.write_all(content.as_bytes()).unwrap(); } } diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs index a1563ff7e590..9765435f0973 100644 --- a/core/lib/basic_types/src/basic_fri_types.rs +++ b/core/lib/basic_types/src/basic_fri_types.rs @@ -27,6 +27,7 @@ type Eip4844BlobsInner = [Option; MAX_4844_BLOBS_PER_BLOCK]; /// - there are between [1, 16] blobs /// - all blobs are of the same size [`EIP_4844_BLOB_SIZE`] /// - there may be no blobs in case of Validium +/// /// Creating a structure violating these constraints will panic. /// /// Note: blobs are padded to fit the correct size. diff --git a/core/lib/config/src/configs/chain.rs b/core/lib/config/src/configs/chain.rs index 53884c4a7227..6ac70b27b84a 100644 --- a/core/lib/config/src/configs/chain.rs +++ b/core/lib/config/src/configs/chain.rs @@ -30,11 +30,11 @@ impl NetworkConfig { /// An enum that represents the version of the fee model to use. /// - `V1`, the first model that was used in ZKsync Era. In this fee model, the pubdata price must be pegged to the L1 gas price. 
-/// Also, the fair L2 gas price is expected to only include the proving/computation price for the operator and not the costs that come from -/// processing the batch on L1. +/// Also, the fair L2 gas price is expected to only include the proving/computation price for the operator and not the costs that come from +/// processing the batch on L1. /// - `V2`, the second model that was used in ZKsync Era. There the pubdata price might be independent from the L1 gas price. Also, -/// The fair L2 gas price is expected to both the proving/computation price for the operator and the costs that come from -/// processing the batch on L1. +/// The fair L2 gas price is expected to both the proving/computation price for the operator and the costs that come from +/// processing the batch on L1. #[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)] pub enum FeeModelVersion { V1, diff --git a/core/lib/crypto_primitives/src/packed_eth_signature.rs b/core/lib/crypto_primitives/src/packed_eth_signature.rs index 7e5efc07bb92..3d76de73560e 100644 --- a/core/lib/crypto_primitives/src/packed_eth_signature.rs +++ b/core/lib/crypto_primitives/src/packed_eth_signature.rs @@ -22,7 +22,7 @@ use crate::{ /// /// That is why: /// 1) when we create this structure by deserialization of message produced by user -/// we subtract 27 from v in `ETHSignature` if necessary and store it in the `ETHSignature` structure this way. +/// we subtract 27 from v in `ETHSignature` if necessary and store it in the `ETHSignature` structure this way. /// 2) When we serialize/create this structure we add 27 to v in `ETHSignature`. /// /// This way when we have methods that consumes &self we can be sure that ETHSignature::recover_signer works diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 0ddf36abdbed..43e86116092b 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,6 +1,5 @@ use std::time::Duration; -use strum::{Display, EnumString}; use zksync_db_connection::{ connection::Connection, error::DalResult, @@ -16,18 +15,6 @@ pub struct TeeProofGenerationDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } -#[derive(Debug, EnumString, Display)] -enum TeeProofGenerationJobStatus { - #[strum(serialize = "ready_to_be_proven")] - ReadyToBeProven, - #[strum(serialize = "picked_by_prover")] - PickedByProver, - #[strum(serialize = "generated")] - Generated, - #[strum(serialize = "skipped")] - Skipped, -} - impl TeeProofGenerationDal<'_, '_> { pub async fn get_next_block_to_be_proven( &mut self, diff --git a/core/lib/dal/src/tee_verifier_input_producer_dal.rs b/core/lib/dal/src/tee_verifier_input_producer_dal.rs index bdf899fa36f8..4adee62e7aa6 100644 --- a/core/lib/dal/src/tee_verifier_input_producer_dal.rs +++ b/core/lib/dal/src/tee_verifier_input_producer_dal.rs @@ -33,7 +33,7 @@ pub enum TeeVerifierInputProducerJobStatus { /// It is expected to be used if some jobs should be skipped like: /// - testing purposes (want to check a specific L1 Batch, I can mark everything before it skipped) /// - trim down costs on some environments (if I've done breaking changes, - /// makes no sense to wait for everything to be processed, I can just skip them and save resources) + /// makes no sense to wait for everything to be processed, I can just skip them and save resources) ManuallySkipped, /// Currently being processed by one of the jobs. 
Transitory state, will transition to either /// [`TeeVerifierInputProducerStatus::Successful`] or [`TeeVerifierInputProducerStatus::Failed`]. diff --git a/core/lib/merkle_tree/src/recovery/mod.rs b/core/lib/merkle_tree/src/recovery/mod.rs index c208c12795a2..51dc87e16d3e 100644 --- a/core/lib/merkle_tree/src/recovery/mod.rs +++ b/core/lib/merkle_tree/src/recovery/mod.rs @@ -15,11 +15,11 @@ //! Recovery process proceeds as follows: //! //! 1. Initialize a tree in the recovery mode. Until recovery is finished, the tree cannot be accessed -//! using ordinary [`MerkleTree`] APIs. +//! using ordinary [`MerkleTree`] APIs. //! 2. Update the tree from a snapshot, which [is fed to the tree](MerkleTreeRecovery::extend()) -//! as [`RecoveryEntry`] chunks. Recovery entries must be ordered by increasing key. +//! as [`RecoveryEntry`] chunks. Recovery entries must be ordered by increasing key. //! 3. Finalize recovery using [`MerkleTreeRecovery::finalize()`]. To check integrity, you may compare -//! [`MerkleTreeRecovery::root_hash()`] to the reference value. +//! [`MerkleTreeRecovery::root_hash()`] to the reference value. //! //! The recovery process is tolerant to crashes and may be resumed from the middle. To find the latest //! recovered key, you may use [`MerkleTreeRecovery::last_processed_key()`]. diff --git a/core/lib/merkle_tree/src/storage/mod.rs b/core/lib/merkle_tree/src/storage/mod.rs index b70485b93188..dfc99f9be7cb 100644 --- a/core/lib/merkle_tree/src/storage/mod.rs +++ b/core/lib/merkle_tree/src/storage/mod.rs @@ -118,13 +118,13 @@ impl TreeUpdater { /// /// 1. Walk from the root of the tree along the inserted `key` while we can. /// 2. If the node we've stopped at is an internal node, it means it doesn't have - /// a child at the corresponding nibble from `key`. Create a new leaf node with `key` and - /// `value_hash` and insert it as a new child of the found internal node. + /// a child at the corresponding nibble from `key`. Create a new leaf node with `key` and + /// `value_hash` and insert it as a new child of the found internal node. /// 3. Else the node we've stopped is a leaf. If the full key stored in this leaf is `key`, - /// we just need to update `value_hash` stored in the leaf. + /// we just need to update `value_hash` stored in the leaf. /// 4. Else (the node we've stopped is a leaf with `other_key != key`) we need to create - /// one or more internal nodes that would contain the common prefix between `key` - /// and `other_key` and a "fork" where these keys differ. + /// one or more internal nodes that would contain the common prefix between `key` + /// and `other_key` and a "fork" where these keys differ. /// /// We change step 1 by starting not from the root, but rather from the node ancestor /// we've found in [`Self::load_ancestors()`] for a (moderate) performance boost. Note that diff --git a/core/lib/multivm/src/glue/tracers/mod.rs b/core/lib/multivm/src/glue/tracers/mod.rs index 8a138d9461fd..7aa792ef1f71 100644 --- a/core/lib/multivm/src/glue/tracers/mod.rs +++ b/core/lib/multivm/src/glue/tracers/mod.rs @@ -8,16 +8,16 @@ //! this module defines one primary trait: //! //! - `MultiVMTracer`: This trait represents a tracer that can be converted into a tracer for -//! a specific VM version. +//! a specific VM version. //! //! Specific traits for each VM version, which support Custom Tracers: //! - `IntoLatestTracer`: This trait is responsible for converting a tracer -//! into a form compatible with the latest VM version. -//! 
It defines a method `latest` for obtaining a boxed tracer. +//! into a form compatible with the latest VM version. +//! It defines a method `latest` for obtaining a boxed tracer. //! //! - `IntoVmVirtualBlocksTracer`: This trait is responsible for converting a tracer -//! into a form compatible with the vm_virtual_blocks version. -//! It defines a method `vm_virtual_blocks` for obtaining a boxed tracer. +//! into a form compatible with the vm_virtual_blocks version. +//! It defines a method `vm_virtual_blocks` for obtaining a boxed tracer. //! //! For `MultiVMTracer` to be implemented, the Tracer must implement all N currently //! existing sub-traits. diff --git a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs index 22503ce9881c..075660ad58aa 100644 --- a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs @@ -37,9 +37,9 @@ use crate::{ /// We employ the following rules for cold/warm storage rules: /// - We price a single "I/O" access as 2k ergs. This means that reading a single storage slot -/// would cost 2k ergs, while writing to it would 4k ergs (since it involves both reading during execution and writing at the end of it). +/// would cost 2k ergs, while writing to it would 4k ergs (since it involves both reading during execution and writing at the end of it). /// - Thereafter, "warm" reads cost 30 ergs, while "warm" writes cost 60 ergs. Warm writes to account cost more for the fact that they may be reverted -/// and so require more RAM to store them. +/// and so require more RAM to store them. const WARM_READ_REFUND: u32 = STORAGE_ACCESS_COLD_READ_COST - STORAGE_ACCESS_WARM_READ_COST; const WARM_WRITE_REFUND: u32 = STORAGE_ACCESS_COLD_WRITE_COST - STORAGE_ACCESS_WARM_WRITE_COST; diff --git a/core/lib/types/src/fee_model.rs b/core/lib/types/src/fee_model.rs index 6f5985d46108..b59aa65b04e0 100644 --- a/core/lib/types/src/fee_model.rs +++ b/core/lib/types/src/fee_model.rs @@ -9,9 +9,10 @@ use crate::ProtocolVersionId; /// Fee input to be provided into the VM. It contains two options: /// - `L1Pegged`: L1 gas price is provided to the VM, and the pubdata price is derived from it. Using this option is required for the -/// versions of Era prior to 1.4.1 integration. +/// versions of Era prior to 1.4.1 integration. /// - `PubdataIndependent`: L1 gas price and pubdata price are not necessarily dependent on one another. This options is more suitable for the -/// versions of Era after the 1.4.1 integration. It is expected that if a VM supports `PubdataIndependent` version, then it should also support `L1Pegged` version, but converting it into `PubdataIndependentBatchFeeModelInput` in-place. +/// versions of Era after the 1.4.1 integration. It is expected that if a VM supports `PubdataIndependent` version, then it should also support +/// `L1Pegged` version, but converting it into `PubdataIndependentBatchFeeModelInput` in-place. #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum BatchFeeInput { L1Pegged(L1PeggedBatchFeeModelInput), @@ -161,11 +162,11 @@ pub struct PubdataIndependentBatchFeeModelInput { /// The enum which represents the version of the fee model. It is used to determine which fee model should be used for the batch. /// - `V1`, the first model that was used in ZKsync Era. In this fee model, the pubdata price must be pegged to the L1 gas price. 
-/// Also, the fair L2 gas price is expected to only include the proving/computation price for the operator and not the costs that come from -/// processing the batch on L1. +/// Also, the fair L2 gas price is expected to only include the proving/computation price for the operator and not the costs that come from +/// processing the batch on L1. /// - `V2`, the second model that was used in ZKsync Era. There the pubdata price might be independent from the L1 gas price. Also, -/// The fair L2 gas price is expected to both the proving/computation price for the operator and the costs that come from -/// processing the batch on L1. +/// The fair L2 gas price is expected to both the proving/computation price for the operator and the costs that come from +/// processing the batch on L1. #[derive(Debug, Clone, Copy, Serialize, Deserialize)] pub enum FeeModelConfig { V1(FeeModelConfigV1), diff --git a/core/lib/types/src/storage/writes/mod.rs b/core/lib/types/src/storage/writes/mod.rs index ef19eeffed02..88f032db1924 100644 --- a/core/lib/types/src/storage/writes/mod.rs +++ b/core/lib/types/src/storage/writes/mod.rs @@ -122,10 +122,11 @@ impl StateDiffRecord { /// entry <- enumeration_index || compressed value /// 2. if initial write: /// entry <- blake2(bytes32(address), key) || compressed value + /// /// size: - /// initial: max of 65 bytes - /// repeated: max of 38 bytes - /// before: 156 bytes for each + /// - initial: max of 65 bytes + /// - repeated: max of 38 bytes + /// - before: 156 bytes for each pub fn compress(&self) -> Vec { let mut comp_state_diff = match self.enumeration_index { 0 => self.derived_key.to_vec(), diff --git a/core/lib/vlog/src/logs/mod.rs b/core/lib/vlog/src/logs/mod.rs index 0ecf1c6d9f0b..2379119ddffc 100644 --- a/core/lib/vlog/src/logs/mod.rs +++ b/core/lib/vlog/src/logs/mod.rs @@ -1,4 +1,4 @@ -use std::{backtrace::Backtrace, panic::PanicInfo, str::FromStr}; +use std::{backtrace::Backtrace, str::FromStr}; use serde::Deserialize; use tracing_subscriber::{fmt, registry::LookupSpan, EnvFilter, Layer}; @@ -129,7 +129,8 @@ impl Logs { } } -fn json_panic_handler(panic_info: &PanicInfo) { +#[allow(deprecated)] // Not available yet on stable, so we can't switch right now. +fn json_panic_handler(panic_info: &std::panic::PanicInfo) { let backtrace = Backtrace::force_capture(); let timestamp = chrono::Utc::now(); let panic_message = if let Some(s) = panic_info.payload().downcast_ref::() { diff --git a/core/node/api_server/src/tx_sender/master_pool_sink.rs b/core/node/api_server/src/tx_sender/master_pool_sink.rs index b7478b9c9711..cb4e73e3bb79 100644 --- a/core/node/api_server/src/tx_sender/master_pool_sink.rs +++ b/core/node/api_server/src/tx_sender/master_pool_sink.rs @@ -56,9 +56,8 @@ impl TxSink for MasterPoolSink { .transactions_dal() .insert_transaction_l2(tx, execution_metrics) .await - .map(|submission_res_handle| { - APP_METRICS.processed_txs[&TxStage::Mempool(submission_res_handle)].inc(); - submission_res_handle + .inspect(|submission_res_handle| { + APP_METRICS.processed_txs[&TxStage::Mempool(*submission_res_handle)].inc(); }) .map_err(|err| err.generalize().into()), Err(err) => Err(err.generalize().into()), diff --git a/core/node/vm_runner/src/storage.rs b/core/node/vm_runner/src/storage.rs index 75ed4cb57a9c..1c83b6525c7b 100644 --- a/core/node/vm_runner/src/storage.rs +++ b/core/node/vm_runner/src/storage.rs @@ -57,8 +57,8 @@ struct BatchData { /// Abstraction for VM runner's storage layer that provides two main features: /// /// 1. 
A [`ReadStorageFactory`] implementation backed by either Postgres or RocksDB (if it's -/// caught up). Always initialized as a `Postgres` variant and is then mutated into `Rocksdb` -/// once RocksDB cache is caught up. +/// caught up). Always initialized as a `Postgres` variant and is then mutated into `Rocksdb` +/// once RocksDB cache is caught up. /// 2. Loads data needed to re-execute the next unprocessed L1 batch. /// /// Users of `VmRunnerStorage` are not supposed to retain storage access to batches that are less diff --git a/docker/build-base/Dockerfile b/docker/build-base/Dockerfile index 68ea7ce001c7..be3c6ddb452e 100644 --- a/docker/build-base/Dockerfile +++ b/docker/build-base/Dockerfile @@ -9,7 +9,7 @@ ENV RUSTUP_HOME=/usr/local/rustup \ PATH=/usr/local/cargo/bin:$PATH RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ - rustup install nightly-2024-05-07 && \ - rustup default nightly-2024-05-07 + rustup install nightly-2024-08-01 && \ + rustup default nightly-2024-08-01 RUN cargo install sqlx-cli --version 0.8.0 diff --git a/docker/proof-fri-gpu-compressor/Dockerfile b/docker/proof-fri-gpu-compressor/Dockerfile index e6d2e0f11627..02ca4a3b77b0 100644 --- a/docker/proof-fri-gpu-compressor/Dockerfile +++ b/docker/proof-fri-gpu-compressor/Dockerfile @@ -15,8 +15,8 @@ ENV RUSTUP_HOME=/usr/local/rustup \ PATH=/usr/local/cargo/bin:$PATH RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ - rustup install nightly-2024-05-07 && \ - rustup default nightly-2024-05-07 + rustup install nightly-2024-08-01 && \ + rustup default nightly-2024-08-01 RUN curl -Lo cmake-3.24.2-linux-x86_64.sh https://github.com/Kitware/CMake/releases/download/v3.24.2/cmake-3.24.2-linux-x86_64.sh && \ chmod +x cmake-3.24.2-linux-x86_64.sh && \ diff --git a/docker/prover-gpu-fri/Dockerfile b/docker/prover-gpu-fri/Dockerfile index 0894c1c0c47d..1f1aaa447f22 100644 --- a/docker/prover-gpu-fri/Dockerfile +++ b/docker/prover-gpu-fri/Dockerfile @@ -14,8 +14,8 @@ ENV RUSTUP_HOME=/usr/local/rustup \ PATH=/usr/local/cargo/bin:$PATH RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ - rustup install nightly-2024-05-07 && \ - rustup default nightly-2024-05-07 + rustup install nightly-2024-08-01 && \ + rustup default nightly-2024-08-01 RUN curl -Lo cmake-3.24.2-linux-x86_64.sh https://github.com/Kitware/CMake/releases/download/v3.24.2/cmake-3.24.2-linux-x86_64.sh && \ chmod +x cmake-3.24.2-linux-x86_64.sh && \ diff --git a/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile b/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile index 645111a4f0a6..0c0fd7a9bb3d 100644 --- a/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile +++ b/docker/zk-environment/20.04_amd64_cuda_11_8.Dockerfile @@ -78,7 +78,7 @@ RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/so gcloud config set metrics/environment github_docker_image RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y -RUN rustup install nightly-2024-05-07 +RUN rustup install nightly-2024-08-01 RUN rustup default stable RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest diff --git a/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile b/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile index c7af93ce906a..5bd569b7d20b 100644 --- a/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile +++ b/docker/zk-environment/20.04_amd64_cuda_12_0.Dockerfile @@ -76,7 +76,7 @@ RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/so gcloud config set metrics/environment github_docker_image RUN 
wget -c -O - https://sh.rustup.rs | bash -s -- -y -RUN rustup install nightly-2024-05-07 +RUN rustup install nightly-2024-08-01 RUN rustup default stable RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index 9dc7aa1a13b4..db9fb0ce5971 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -142,5 +142,5 @@ ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache FROM rust-lightweight as rust-lightweight-nightly -RUN rustup install nightly-2024-05-07 && \ - rustup default nightly-2024-05-07 +RUN rustup install nightly-2024-08-01 && \ + rustup default nightly-2024-08-01 diff --git a/flake.lock b/flake.lock index fe16e2254b51..e217d37664cd 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1720226507, - "narHash": "sha256-yHVvNsgrpyNTXZBEokL8uyB2J6gB1wEx0KOJzoeZi1A=", + "lastModified": 1722960479, + "narHash": "sha256-NhCkJJQhD5GUib8zN9JrmYGMwt4lCRp6ZVNzIiYCl0Y=", "owner": "ipetkov", "repo": "crane", - "rev": "0aed560c5c0a61c9385bddff471a13036203e11c", + "rev": "4c6c77920b8d44cd6660c1621dea6b3fc4b4c4f4", "type": "github" }, "original": { @@ -257,11 +257,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1719956923, - "narHash": "sha256-nNJHJ9kfPdzYsCOlHOnbiiyKjZUW5sWbwx3cakg3/C4=", + "lastModified": 1722869614, + "narHash": "sha256-7ojM1KSk3mzutD7SkrdSflHXEujPvW1u7QuqWoTLXQU=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "706eef542dec88cc0ed25b9075d3037564b2d164", + "rev": "883180e6550c1723395a3a342f830bfc5c371f6b", "type": "github" }, "original": { @@ -341,11 +341,11 @@ "snowfall-lib": "snowfall-lib" }, "locked": { - "lastModified": 1719923509, - "narHash": "sha256-3buuJSKCVT0o42jpreoflYA+Rlp/4eQKATEAY+pPeh8=", + "lastModified": 1721741092, + "narHash": "sha256-ghFoP5gZpc1i4I4PiVCH00QNZ6s6ipGUcA0P1TsSSC8=", "owner": "matter-labs", "repo": "nixsgx", - "rev": "520ad6227523c5720468726f9e945cecdb7a37aa", + "rev": "be2c19592d0d5601184c52c07ab6d88dec07ffd6", "type": "github" }, "original": { @@ -407,11 +407,11 @@ "nixpkgs": "nixpkgs_3" }, "locked": { - "lastModified": 1720059535, - "narHash": "sha256-h/O3PoV3KvQG4tC5UpANBZOsptAZCzEGiwyi+3oSpYc=", + "lastModified": 1722997267, + "narHash": "sha256-8Pncp8IKd0f0N711CRrCGTC4iLfBE+/5kaMqyWxnYic=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "8deeed2dfa21837c7792b46b6a9b2e73f97b472b", + "rev": "d720bf3cebac38c2426d77ee2e59943012854cb8", "type": "github" }, "original": { @@ -623,11 +623,11 @@ "vault-auth-tee-flake": "vault-auth-tee-flake" }, "locked": { - "lastModified": 1720011517, - "narHash": "sha256-1oo9Z47CNdqDgtGNE1LC+6CQ+VXcy7TtFFnvifBnVLE=", + "lastModified": 1723034739, + "narHash": "sha256-bu4XvqwsPUzfMzk5t10wyHliItfH7FOk42V0CIwl4lg=", "owner": "matter-labs", "repo": "teepot", - "rev": "8dadc1f76b7dd8a98be7781e8206fed5268dd0e6", + "rev": "4ed311a16a72521f79418216ad29e6eed8db347d", "type": "github" }, "original": { diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a88155be9024..a7400fc2490e 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -524,7 +524,7 @@ version = "0.69.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cexpr", "clang-sys", "itertools 0.12.1", @@ -567,9 +567,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = 
"2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "serde", ] @@ -698,9 +698,9 @@ dependencies = [ [[package]] name = "boojum" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cf10f4b3980dc82dc31709dfa8193b7d6106a3a7ce9f9a9f8872bfb8719aa2d" +checksum = "df88daa33db46d683967ca09a4f04817c38950483f2501a771d497669a8a4bb1" dependencies = [ "arrayvec 0.7.4", "bincode", @@ -730,9 +730,9 @@ dependencies = [ [[package]] name = "boojum-cuda" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf1cadeaf29fa0f076c230a08f57932619b2ba46a1977c72bb42160574400f54" +checksum = "407123a79308091866f0199d510ee2fb930727204dd77d6805b3437d6cb859eb" dependencies = [ "boojum", "cmake", @@ -921,9 +921,9 @@ dependencies = [ [[package]] name = "circuit_definitions" -version = "0.140.0-gpu-wrapper.1" +version = "0.140.1-gpu-wrapper.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f38b36c1cbc5e53bb016eebbaa0740f939bec9154100e9e31d28035faed202" +checksum = "54965c22dfd81bca2a8abd53f140c1907304b7aad0fd69679389a96202129003" dependencies = [ "crossbeam 0.8.4", "derivative", @@ -931,16 +931,16 @@ dependencies = [ "serde", "snark_wrapper", "zk_evm 0.140.0", - "zkevm_circuits 0.140.1", + "zkevm_circuits 0.140.2", ] [[package]] name = "circuit_definitions" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98221101f42cceafa9baf8ec320ef78aa02bf7c2be5c398bf90e7acf1709bfa" +checksum = "68c5da9d10ee04601445afac76591f838b4f64f4f8fb8c3d1b3414a260d51b6c" dependencies = [ - "circuit_encodings 0.150.2-rc.2", + "circuit_encodings 0.150.2-rc.3", "crossbeam 0.8.4", "derivative", "seq-macro", @@ -957,7 +957,7 @@ dependencies = [ "derivative", "serde", "zk_evm 0.140.0", - "zkevm_circuits 0.140.1", + "zkevm_circuits 0.140.2", ] [[package]] @@ -986,14 +986,14 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e59747066b9a0d1a15d45f5837658aec5d53744fb643954f9dcc412f76c0d346" +checksum = "c928cad0aeeb35e86f8605376fdbb27b506cfcec14af1f532459a47e34d8b6f9" dependencies = [ "derivative", "serde", "zk_evm 0.150.0", - "zkevm_circuits 0.150.1", + "zkevm_circuits 0.150.2", ] [[package]] @@ -1053,12 +1053,12 @@ dependencies = [ [[package]] name = "circuit_sequencer_api" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfeb50910b20c4f05cc51700b2396a655cef9e6f0c84debd71cb02ce4853902" +checksum = "18d8ca58b9bb7c63a75813c96a5a80314fd70013d7929f61fc0e9e69b0e440a7" dependencies = [ "bellman_ce 0.7.0", - "circuit_encodings 0.150.2-rc.2", + "circuit_encodings 0.150.2-rc.3", "derivative", "rayon", "serde", @@ -1478,9 +1478,9 @@ dependencies = [ [[package]] name = "cs_derive" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab1f510bfddd1fc643a1d1bf8a405e279ffc818ee7ac86ed658e667a44958178" +checksum = "24cf603ca4299c6e20e644da88897f7b81d688510f4887e818b0bfe0b792081b" dependencies = [ "proc-macro-error", "proc-macro2 1.0.85", @@ 
-1488,6 +1488,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ctor" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +dependencies = [ + "quote 1.0.36", + "syn 2.0.66", +] + [[package]] name = "ctrlc" version = "3.4.4" @@ -1873,22 +1883,21 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "era_cudart" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1725b17e5e41b89f566ace3900f119fdc87f04e2daa8e253b668573ad67a454f" +checksum = "6592e1277ac1ab0f3925151784a3809f4f973b1a63a0244b6d44e3872b413199" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "era_cudart_sys", "paste", ] [[package]] name = "era_cudart_sys" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60d46683f8a9a5364874f95b00073f6dc93d33e9a019f150b0d6ce09ffc13251" +checksum = "21767c452b418a7fb2bb9ffb07c744e4616da8d14176db4dcab76649c3206ece" dependencies = [ - "bindgen 0.69.4", "serde_json", ] @@ -2126,9 +2135,9 @@ dependencies = [ [[package]] name = "franklin-crypto" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e35c05c35290529cf4704bf667777bda5d1b757d63445591cd19163ee0909df8" +checksum = "05eab544ba915269919b5f158a061b540a4e3a04150c1346481f4f7b80eb6311" dependencies = [ "arr_macro", "bellman_ce 0.8.0", @@ -3317,26 +3326,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linkme" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccb76662d78edc9f9bf56360d6919bdacc8b7761227727e5082f128eeb90bbf5" -dependencies = [ - "linkme-impl", -] - -[[package]] -name = "linkme-impl" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dccda732e04fa3baf2e17cf835bfe2601c7c2edafd64417c627dabae3a8cda" -dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3611,7 +3600,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if 1.0.0", "cfg_aliases", "libc", @@ -3860,7 +3849,7 @@ version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if 1.0.0", "foreign-types", "libc", @@ -4369,7 +4358,7 @@ checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.5.0", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand 0.8.5", @@ -4507,7 +4496,7 @@ dependencies = [ "anyhow", "bincode", "chrono", - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "clap 4.5.4", "colored", "dialoguer", @@ -4518,7 +4507,7 @@ dependencies = [ "tokio", "tracing", "tracing-subscriber", - "zkevm_test_harness 0.150.2-rc.2", + "zkevm_test_harness 0.150.2-rc.3", "zksync_basic_types", "zksync_config", "zksync_contracts", @@ -4566,7 +4555,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" 
dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "memchr", "unicase", ] @@ -4721,14 +4710,14 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.10.4" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -4881,9 +4870,9 @@ dependencies = [ [[package]] name = "rescue_poseidon" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf8f3aca783485c92639215977a2219cf25ba26836d088d75a088e7cba842bf" +checksum = "f27fbc6ba44baf99a0ca8387b1fa1cf90d3d7062860c1afedbbb64454829acc5" dependencies = [ "addchain", "arrayvec 0.7.4", @@ -5058,7 +5047,7 @@ version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", @@ -5273,7 +5262,7 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -5605,15 +5594,15 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shivini" -version = "0.150.2-rc.2" +version = "0.150.2-rc3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c35f41f876fc986d18e02d97a20a6980ca108c28f77cc844406d1c9c1f6ed9" +checksum = "110bb1fe4020af4f1be74f467b69bace76a98a3ecedc4c654ed90cc7c6a9aaba" dependencies = [ "bincode", "blake2 0.10.6", "boojum", "boojum-cuda", - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "derivative", "era_cudart", "era_cudart_sys", @@ -5712,9 +5701,9 @@ dependencies = [ [[package]] name = "snark_wrapper" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7dd2b167b67a0ec5b2727519bf7c03fcbd43edaa3ebf05a6013fb6fbfed20f6" +checksum = "71aa5bffe5e7daca634bf2fedf0bf566273cb7eae01711d1aa6e5223d36d987d" dependencies = [ "derivative", "rand 0.4.6", @@ -5897,7 +5886,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.5.0", + "bitflags 2.6.0", "byteorder", "bytes", "chrono", @@ -5942,7 +5931,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.5.0", + "bitflags 2.6.0", "byteorder", "chrono", "crc", @@ -6831,13 +6820,13 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "vise" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229baafe01d5177b63c6ee1def80d8e39a2365e64caf69ddb05a57594b15647c" +checksum = "90ade36f3548b1524396f4de7b36f4f210c8a01dfab568eb2bff466af64eb6e5" dependencies = [ "compile-fmt", + "ctor", "elsa", - "linkme", "once_cell", "prometheus-client", "vise-macros", @@ -6845,9 +6834,9 @@ dependencies = [ [[package]] name = "vise-exporter" -version = "0.1.0" +version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "23981b18d697026f5430249ab01ba739ef2edc463e400042394331cb2bb63494" +checksum = "671d3b894d5d0849f0a597f56bf071f42d4f2a1cbcf2f78ca21f870ab7c0cc2b" dependencies = [ "hyper 0.14.29", "once_cell", @@ -6858,9 +6847,9 @@ dependencies = [ [[package]] name = "vise-macros" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb19c33cd5f04dcf4e767635e058a998edbc2b7fca32ade0a4a1cea0f8e9b34" +checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ "proc-macro2 1.0.85", "quote 1.0.36", @@ -6873,7 +6862,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "clap 4.5.4", "hex", "indicatif", @@ -6890,7 +6879,7 @@ dependencies = [ "toml_edit 0.14.4", "tracing", "tracing-subscriber", - "zkevm_test_harness 0.150.2-rc.2", + "zkevm_test_harness 0.150.2-rc.3", "zksync_config", "zksync_env_config", "zksync_prover_fri_types", @@ -7488,9 +7477,9 @@ dependencies = [ [[package]] name = "zkevm_circuits" -version = "0.140.1" +version = "0.140.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806caf4ccbe34ac68193f7d0dd591d1d866d95a740fe45a358eaefd61c357d8e" +checksum = "8beed4cc1ab1f9d99a694506d18705e10059534b30742832be49637c4775e1f8" dependencies = [ "arrayvec 0.7.4", "bincode", @@ -7532,9 +7521,9 @@ dependencies = [ [[package]] name = "zkevm_circuits" -version = "0.150.1" +version = "0.150.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a85c1987a1e7e89f1f8c39ca19bffb61521e719050086372aaea8817f403fc" +checksum = "94d97632ba26e4e6a77a680d6b2bfbcc6f7b9b722976ee31afb922d16a675d45" dependencies = [ "arrayvec 0.7.4", "boojum", @@ -7568,7 +7557,7 @@ version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0769f7b27d8fb06e715da3290c575cac5d04d10a557faef180e847afce50ac4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "blake2 0.10.6", "ethereum-types", "k256 0.11.6", @@ -7583,7 +7572,7 @@ version = "0.141.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6be7bd5f0e0b61211f544147289640b4712715589d7f2fe5229d92a7a3ac64c0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "blake2 0.10.6", "ethereum-types", "k256 0.13.3", @@ -7598,7 +7587,7 @@ version = "0.150.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3328c012d444bdbfadb754a72c01a56879eb66584efc71eac457e89e7843608" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "blake2 0.10.6", "ethereum-types", "k256 0.13.3", @@ -7611,12 +7600,12 @@ dependencies = [ [[package]] name = "zkevm_test_harness" -version = "0.140.0-gpu-wrapper.1" +version = "0.140.1-gpu-wrapper.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dbcf39f7cdd57174c572f227ae46513d2111faed7dc754e300465200e9b791a" +checksum = "bb6b9ca7e42aa17d9bda1209faf166f7c999b583a637a0ce8cefaf3e18e381a3" dependencies = [ "bincode", - "circuit_definitions 0.140.0-gpu-wrapper.1", + "circuit_definitions 0.140.1-gpu-wrapper.1", "codegen", "crossbeam 0.8.4", "derivative", @@ -7635,13 +7624,13 @@ dependencies = [ [[package]] name = "zkevm_test_harness" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2ca25fe64b0dee537ba226983a1f8ca7f8bd4ce82e5d58eb4522252fbe40a6" +checksum = 
"d584283b3a574f76f7854a7edac51ace2e19596aefd72ebd516264415b798c13" dependencies = [ "bincode", - "circuit_definitions 0.150.2-rc.2", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", + "circuit_sequencer_api 0.150.2-rc.3", "codegen", "crossbeam 0.8.4", "derivative", @@ -7662,9 +7651,9 @@ dependencies = [ [[package]] name = "zksync-gpu-ffi" -version = "0.140.0-gpu-wrapper.1" +version = "0.140.1-gpu-wrapper.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cebf6d2f7102b2b4f3bf14d6dfd8c520f95621b180bc4bbfd07ee53093336205" +checksum = "5b5f8b16cc9cafee49f5cdab6d4f13ebf80bdd1c587b6e7d0b9d30c1944e6246" dependencies = [ "bindgen 0.59.2", "crossbeam 0.8.4", @@ -7676,9 +7665,9 @@ dependencies = [ [[package]] name = "zksync-gpu-prover" -version = "0.140.0-gpu-wrapper.1" +version = "0.140.1-gpu-wrapper.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "502acf81d04b5fad616e01ff7c913285da21ce88d17cd1a083a0d3f25cc27b27" +checksum = "2a1d3928ffae19c41263a5efcea810075282c01c996fa5b5c2bf310b8bca6c45" dependencies = [ "bit-vec", "cfg-if 1.0.0", @@ -7693,12 +7682,12 @@ dependencies = [ [[package]] name = "zksync-wrapper-prover" -version = "0.140.0-gpu-wrapper.1" +version = "0.140.1-gpu-wrapper.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99746d1164f60d788f9701b7146ad2878c92f1ffd463ea5b6d72f9979ac464b7" +checksum = "a93d0d66ca7f3b095123a8bf528c3d3353e8d8f2fcc49a889969832b1f149b55" dependencies = [ - "circuit_definitions 0.140.0-gpu-wrapper.1", - "zkevm_test_harness 0.140.0-gpu-wrapper.1", + "circuit_definitions 0.140.1-gpu-wrapper.1", + "zkevm_test_harness 0.140.1-gpu-wrapper.1", "zksync-gpu-prover", ] @@ -7722,9 +7711,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "209b83578357184ab72af4d4cb2eca76f85e5f2f35d739a47e3fd5931eb9252d" +checksum = "a9f9a4352244ccd5e5fd34fb0d029861a5f57b05c80fe7944a7b532f54c58f89" dependencies = [ "anyhow", "once_cell", @@ -7757,9 +7746,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1abf1f3d9c8109da32a6d5e61a2a64a61b0bff90fdd355992522a4e8a57e69" +checksum = "f8c91270540e8db9479e1eaedaf0e600de468f71ccd5dc7c0258072e743830e6" dependencies = [ "anyhow", "blst", @@ -7781,9 +7770,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e0b04d43a542a3bb1af0ac4c0a17acf6b743607c3cb9028192df0c7d2f5b24" +checksum = "87e79025fd678ec2733add1697645827e9daed3f120c8cebf43513ac17e65b63" dependencies = [ "anyhow", "bit-vec", @@ -7803,9 +7792,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c39f79628bd6685f7ec5561874c007f5d26693d6ba7e5595dfa260981e8f006" +checksum = "470991a42d5f9a3f2385ebe52889e63742d95d141b80b95a1eabe9f51e18cb7e" dependencies = [ "anyhow", "async-trait", @@ -7823,9 +7812,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c4514629a34abdf943ef911c16228dfec656edb02d8412db4febd4df5ccf3f91" +checksum = "6c43283b5813fd887e0e7ccaee73c6e41907b1de311a3a01b2fa5f2e3f2ba503" dependencies = [ "anyhow", "rand 0.8.5", @@ -7981,9 +7970,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.2-rc.2" +version = "0.150.2-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c44edd3a3316dcab45aab7e190c96150f2586d4a92fa21f93dcc20178308313a" +checksum = "9235fbdaa98f27b9aacaa861bcb850b6b0dbf37e59477ce3f08c64555a25d00d" dependencies = [ "boojum", "derivative", @@ -7993,7 +7982,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "zkevm_circuits 0.150.1", + "zkevm_circuits 0.150.2", ] [[package]] @@ -8033,7 +8022,7 @@ dependencies = [ "circuit_sequencer_api 0.140.0", "circuit_sequencer_api 0.141.1", "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "hex", "itertools 0.10.5", "once_cell", @@ -8104,7 +8093,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "clap 4.5.4", "ctrlc", "futures 0.3.30", @@ -8116,7 +8105,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.2-rc.2", + "zkevm_test_harness 0.150.2-rc.3", "zksync-wrapper-prover", "zksync_config", "zksync_core_leftovers", @@ -8133,9 +8122,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53128384270314bfbd4e044c15138af63cb3a505ab95bb3339f3b866ccbe211c" +checksum = "b5db598a518958b244aed5e3f925c763808429a5ea022bb50957b98e68540495" dependencies = [ "anyhow", "bit-vec", @@ -8154,9 +8143,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d7dfb4dcdd48ab5fa1ccff25f585d73b58cf95e0fb74e96618dd666f198a005" +checksum = "4047ed624c7a19e206125f8259f7e175ad70020beeb66e1975e068af060d2fb5" dependencies = [ "anyhow", "heck 0.5.0", @@ -8204,7 +8193,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "clap 4.5.4", "ctrlc", "futures 0.3.30", @@ -8217,7 +8206,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.2-rc.2", + "zkevm_test_harness 0.150.2-rc.3", "zksync_config", "zksync_core_leftovers", "zksync_env_config", @@ -8261,7 +8250,7 @@ dependencies = [ name = "zksync_prover_fri_types" version = "0.1.0" dependencies = [ - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "serde", "zksync_object_store", "zksync_types", @@ -8290,7 +8279,7 @@ name = "zksync_prover_interface" version = "0.1.0" dependencies = [ "chrono", - "circuit_sequencer_api 0.150.2-rc.2", + "circuit_sequencer_api 0.150.2-rc.3", "serde", "serde_with", "strum", @@ -8473,7 +8462,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "circuit_definitions 0.150.2-rc.2", + "circuit_definitions 0.150.2-rc.3", "const-decoder", "ctrlc", "futures 0.3.30", @@ -8487,7 +8476,7 @@ dependencies = [ "tracing", "vise", "vk_setup_data_generator_server_fri", - "zkevm_test_harness 0.150.2-rc.2", + "zkevm_test_harness 0.150.2-rc.3", "zksync_config", "zksync_core_leftovers", "zksync_env_config", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 
4dfc77432979..7ab6dd16b99b 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -53,16 +53,16 @@ tokio = "1" toml_edit = "0.14.4" tracing = "0.1" tracing-subscriber = { version = "0.3" } -vise = "0.1.0" +vise = "0.2.0" # Proving dependencies -circuit_definitions = "=0.150.2-rc.2" -circuit_sequencer_api = "=0.150.2-rc.2" -zkevm_test_harness = "=0.150.2-rc.2" +circuit_definitions = "=0.150.2-rc.3" +circuit_sequencer_api = "=0.150.2-rc.3" +zkevm_test_harness = "=0.150.2-rc.3" # GPU proving dependencies -wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.140.0-gpu-wrapper.1" } -shivini = "=0.150.2-rc.2" +wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.140.1-gpu-wrapper.1" } +shivini = "=0.150.2-rc3" # Core workspace dependencies zksync_multivm = { path = "../core/lib/multivm", version = "0.1.0" } diff --git a/prover/crates/bin/proof_fri_compressor/README.md b/prover/crates/bin/proof_fri_compressor/README.md index 097a59e5d09b..c3d5729f40c9 100644 --- a/prover/crates/bin/proof_fri_compressor/README.md +++ b/prover/crates/bin/proof_fri_compressor/README.md @@ -4,4 +4,4 @@ Used to compress FRI proof to Bellman proof that gets sent to L1. ## running -`zk f cargo +nightly-2024-05-07 run --release --bin zksync_proof_fri_compressor` +`zk f cargo +nightly-2024-08-01 run --release --bin zksync_proof_fri_compressor` diff --git a/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs index 6f4946af5b21..1ac6c4f4230d 100644 --- a/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs +++ b/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs @@ -136,7 +136,7 @@ mod tests { for entry in std::fs::read_dir(path_to_input.clone()).unwrap().flatten() { if entry.metadata().unwrap().is_dir() { - let basepath = path_to_input.join(&entry.file_name().into_string().unwrap()); + let basepath = path_to_input.join(entry.file_name()); let keystore = Keystore::new_with_optional_setup_path(basepath.clone(), None); let expected = diff --git a/prover/crates/bin/witness_vector_generator/README.md b/prover/crates/bin/witness_vector_generator/README.md index dde192533db3..3348b294324a 100644 --- a/prover/crates/bin/witness_vector_generator/README.md +++ b/prover/crates/bin/witness_vector_generator/README.md @@ -4,4 +4,4 @@ Used to generate witness vectors using circuit and sending them to prover over T ## running -`zk f cargo +nightly-2024-05-07 run --release --bin zksync_witness_vector_generator` +`zk f cargo +nightly-2024-08-01 run --release --bin zksync_witness_vector_generator` diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs index bc9cde72fde2..488d5b3a5ec9 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs @@ -304,6 +304,7 @@ impl FriWitnessGeneratorDal<'_, '_> { /// - all node aggregation jobs at depth 0 for the batch /// - the recursion tip witness job /// - the scheduler witness job + /// /// NOTE: Not all batches have all circuits, so it's possible we'll be missing some aggregation jobs (for circuits not present in the batch). 
pub async fn create_aggregation_jobs( &mut self, diff --git a/prover/rust-toolchain b/prover/rust-toolchain index 5aaef38cd79d..03c040b91f1f 100644 --- a/prover/rust-toolchain +++ b/prover/rust-toolchain @@ -1 +1 @@ -nightly-2024-05-07 +nightly-2024-08-01 diff --git a/rust-toolchain b/rust-toolchain index 5aaef38cd79d..03c040b91f1f 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2024-05-07 +nightly-2024-08-01 diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 51d6af249f71..71ac44361179 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -883,6 +883,16 @@ dependencies = [ "typenum", ] +[[package]] +name = "ctor" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +dependencies = [ + "quote", + "syn 2.0.68", +] + [[package]] name = "ctr" version = "0.9.2" @@ -2571,26 +2581,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linkme" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccb76662d78edc9f9bf56360d6919bdacc8b7761227727e5082f128eeb90bbf5" -dependencies = [ - "linkme-impl", -] - -[[package]] -name = "linkme-impl" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dccda732e04fa3baf2e17cf835bfe2601c7c2edafd64417c627dabae3a8cda" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.68", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -5742,13 +5732,13 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "vise" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229baafe01d5177b63c6ee1def80d8e39a2365e64caf69ddb05a57594b15647c" +checksum = "90ade36f3548b1524396f4de7b36f4f210c8a01dfab568eb2bff466af64eb6e5" dependencies = [ "compile-fmt", + "ctor", "elsa", - "linkme", "once_cell", "prometheus-client", "vise-macros", @@ -5756,9 +5746,9 @@ dependencies = [ [[package]] name = "vise-exporter" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23981b18d697026f5430249ab01ba739ef2edc463e400042394331cb2bb63494" +checksum = "671d3b894d5d0849f0a597f56bf071f42d4f2a1cbcf2f78ca21f870ab7c0cc2b" dependencies = [ "hyper 0.14.29", "once_cell", @@ -5769,9 +5759,9 @@ dependencies = [ [[package]] name = "vise-macros" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb19c33cd5f04dcf4e767635e058a998edbc2b7fca32ade0a4a1cea0f8e9b34" +checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ "proc-macro2", "quote", @@ -6347,9 +6337,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "209b83578357184ab72af4d4cb2eca76f85e5f2f35d739a47e3fd5931eb9252d" +checksum = "a9f9a4352244ccd5e5fd34fb0d029861a5f57b05c80fe7944a7b532f54c58f89" dependencies = [ "anyhow", "once_cell", @@ -6381,9 +6371,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4514629a34abdf943ef911c16228dfec656edb02d8412db4febd4df5ccf3f91" +checksum = "6c43283b5813fd887e0e7ccaee73c6e41907b1de311a3a01b2fa5f2e3f2ba503" dependencies = [ 
"anyhow", "rand", @@ -6432,9 +6422,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53128384270314bfbd4e044c15138af63cb3a505ab95bb3339f3b866ccbe211c" +checksum = "b5db598a518958b244aed5e3f925c763808429a5ea022bb50957b98e68540495" dependencies = [ "anyhow", "bit-vec", @@ -6453,9 +6443,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.8" +version = "0.1.0-rc.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d7dfb4dcdd48ab5fa1ccff25f585d73b58cf95e0fb74e96618dd666f198a005" +checksum = "4047ed624c7a19e206125f8259f7e175ad70020beeb66e1975e068af060d2fb5" dependencies = [ "anyhow", "heck", diff --git a/zk_toolbox/Cargo.toml b/zk_toolbox/Cargo.toml index 5a08b56cce7d..a8b6633e0360 100644 --- a/zk_toolbox/Cargo.toml +++ b/zk_toolbox/Cargo.toml @@ -30,7 +30,7 @@ types = { path = "crates/types" } zksync_config = { path = "../core/lib/config" } zksync_protobuf_config = { path = "../core/lib/protobuf_config" } zksync_basic_types = { path = "../core/lib/basic_types" } -zksync_protobuf = "=0.1.0-rc.8" +zksync_protobuf = "=0.1.0-rc.9" # External dependencies anyhow = "1.0.82" From f0c85062fac96180c2e0dec52086714ee3783fcf Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 8 Aug 2024 12:56:40 +0400 Subject: [PATCH 2/8] fix(en): Initialize SyncState in OutputHandler (#2618) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ OutputHandler must be added after ExternalIO, because the latter is adding SyncStateResource ## Why ❔ Bug ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- core/bin/external_node/src/node_builder.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index d71a9f0cdf57..2234e64482db 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -216,8 +216,8 @@ impl ExternalNodeBuilder { rocksdb_options, ); self.node - .add_layer(persistence_layer) .add_layer(io_layer) + .add_layer(persistence_layer) .add_layer(main_node_batch_executor_builder_layer) .add_layer(state_keeper_layer); Ok(self) From 7ae07e446c9732a896ca8246d324e82c6e6d5a46 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 8 Aug 2024 14:59:57 +0400 Subject: [PATCH 3/8] feat(vlog): Expose more resource values via opentelemetry (#2620) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Exposes cluster name and deployment environment in opentelemetry tracing/logs. Used env vars: - `CLUSTER_NAME` - `DEPLOYMENT_ENVIRONMENT` ## Why ❔ Allows collecting data from multiple envs to a single source and then filtering based on resource attributes. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. 
--- core/lib/vlog/src/opentelemetry/mod.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/core/lib/vlog/src/opentelemetry/mod.rs b/core/lib/vlog/src/opentelemetry/mod.rs index d55680cd0a50..c0b72b802de8 100644 --- a/core/lib/vlog/src/opentelemetry/mod.rs +++ b/core/lib/vlog/src/opentelemetry/mod.rs @@ -8,7 +8,7 @@ use opentelemetry_sdk::{ Resource, }; use opentelemetry_semantic_conventions::resource::{ - K8S_NAMESPACE_NAME, K8S_POD_NAME, SERVICE_NAME, + DEPLOYMENT_ENVIRONMENT, K8S_CLUSTER_NAME, K8S_NAMESPACE_NAME, K8S_POD_NAME, SERVICE_NAME, }; use tracing_subscriber::{registry::LookupSpan, EnvFilter, Layer}; use url::Url; @@ -27,6 +27,11 @@ pub struct ServiceDescriptor { pub k8s_pod_name: String, /// Name of the k8s namespace. pub k8s_namespace_name: String, + /// Name of the k8s cluster. + pub k8s_cluster_name: String, + /// Name of the deployment environment. + /// Note that the single deployment environment can be spread among multiple clusters. + pub deployment_environment: String, /// Name of the service. pub service_name: String, } @@ -42,12 +47,20 @@ impl ServiceDescriptor { pub const K8S_POD_NAME_ENV_VAR: &'static str = "POD_NAME"; /// Environment variable to fetch the k8s namespace name. pub const K8S_NAMESPACE_NAME_ENV_VAR: &'static str = "POD_NAMESPACE"; + /// Environment variable to fetch the k8s cluster name. + pub const K8S_CLUSTER_NAME_ENV_VAR: &'static str = "CLUSTER_NAME"; + /// Environment variable to fetch the deployment environment. + pub const DEPLOYMENT_ENVIRONMENT_ENV_VAR: &'static str = "DEPLOYMENT_ENVIRONMENT"; /// Environment variable to fetch the service name. pub const SERVICE_NAME_ENV_VAR: &'static str = "SERVICE_NAME"; /// Default value for the k8s pod name. pub const DEFAULT_K8S_POD_NAME: &'static str = "zksync-0"; /// Default value for the k8s namespace name. pub const DEFAULT_K8S_NAMESPACE_NAME: &'static str = "local"; + /// Default value for the k8s cluster name. + pub const DEFAULT_K8S_CLUSTER_NAME: &'static str = "local"; + /// Default value for the deployment environment. + pub const DEFAULT_DEPLOYMENT_ENVIRONMENT: &'static str = "local"; /// Default value for the service name. 
pub const DEFAULT_SERVICE_NAME: &'static str = "zksync"; @@ -64,6 +77,14 @@ impl ServiceDescriptor { Self::K8S_NAMESPACE_NAME_ENV_VAR, Self::DEFAULT_K8S_NAMESPACE_NAME, ), + k8s_cluster_name: env_or( + Self::K8S_CLUSTER_NAME_ENV_VAR, + Self::DEFAULT_K8S_CLUSTER_NAME, + ), + deployment_environment: env_or( + Self::DEPLOYMENT_ENVIRONMENT_ENV_VAR, + Self::DEFAULT_DEPLOYMENT_ENVIRONMENT, + ), service_name: env_or(Self::SERVICE_NAME_ENV_VAR, Self::DEFAULT_SERVICE_NAME), } } @@ -93,6 +114,8 @@ impl ServiceDescriptor { let attributes = vec![ KeyValue::new(K8S_POD_NAME, self.k8s_pod_name), KeyValue::new(K8S_NAMESPACE_NAME, self.k8s_namespace_name), + KeyValue::new(K8S_CLUSTER_NAME, self.k8s_cluster_name), + KeyValue::new(DEPLOYMENT_ENVIRONMENT, self.deployment_environment), KeyValue::new(SERVICE_NAME, self.service_name), ]; Resource::new(attributes) From aff7b6535ef92aaced0dd7fa1cc08d656cba027e Mon Sep 17 00:00:00 2001 From: perekopskiy <53865202+perekopskiy@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:07:15 +0300 Subject: [PATCH 4/8] feat(node-framework): Add API fee params resource (#2621) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ - adds `ApiFeeInputResource` - moves `GasAdjuster` to resources ## Why ❔ - `GasAdjuster` is logically a resource - Sequencer and API should use different fee params providers. I think it was missed when migrating to the framework ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- core/bin/zksync_server/src/node_builder.rs | 36 +++--- .../layers/eth_sender/manager.rs | 6 +- .../implementations/layers/gas_adjuster.rs | 107 ++++++++++++++++++ .../src/implementations/layers/l1_gas.rs | 102 +++++------------ .../layers/main_node_fee_params_fetcher.rs | 9 +- .../src/implementations/layers/mod.rs | 1 + .../layers/state_keeper/mempool_io.rs | 4 +- .../layers/web3_api/tx_sender.rs | 4 +- .../implementations/resources/fee_input.rs | 26 ++++- .../implementations/resources/gas_adjuster.rs | 21 ++++ .../src/implementations/resources/mod.rs | 1 + 11 files changed, 214 insertions(+), 103 deletions(-) create mode 100644 core/node/node_framework/src/implementations/layers/gas_adjuster.rs create mode 100644 core/node/node_framework/src/implementations/resources/gas_adjuster.rs diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index f504fa0eaebb..7b83587a29eb 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -31,10 +31,11 @@ use zksync_node_framework::{ da_dispatcher::DataAvailabilityDispatcherLayer, eth_sender::{EthTxAggregatorLayer, EthTxManagerLayer}, eth_watch::EthWatchLayer, + gas_adjuster::GasAdjusterLayer, healtcheck_server::HealthCheckLayer, house_keeper::HouseKeeperLayer, l1_batch_commitment_mode_validation::L1BatchCommitmentModeValidationLayer, - l1_gas::SequencerL1GasLayer, + l1_gas::L1GasLayer, metadata_calculator::MetadataCalculatorLayer, node_storage_init::{ main_node_strategy::MainNodeInitStrategyLayer, NodeStorageInitializerLayer, @@ -157,26 +158,30 @@ impl MainNodeBuilder { Ok(self) } - fn add_sequencer_l1_gas_layer(mut self) -> anyhow::Result { - // Ensure the BaseTokenRatioProviderResource is inserted if the base token is not ETH. 
- if self.contracts_config.base_token_addr != Some(SHARED_BRIDGE_ETHER_TOKEN_ADDRESS) { - let base_token_adjuster_config = try_load_config!(self.configs.base_token_adjuster); - self.node - .add_layer(BaseTokenRatioProviderLayer::new(base_token_adjuster_config)); - } - + fn add_gas_adjuster_layer(mut self) -> anyhow::Result { let gas_adjuster_config = try_load_config!(self.configs.eth) .gas_adjuster .context("Gas adjuster")?; - let state_keeper_config = try_load_config!(self.configs.state_keeper_config); let eth_sender_config = try_load_config!(self.configs.eth); - let sequencer_l1_gas_layer = SequencerL1GasLayer::new( + let gas_adjuster_layer = GasAdjusterLayer::new( gas_adjuster_config, self.genesis_config.clone(), - state_keeper_config, try_load_config!(eth_sender_config.sender).pubdata_sending_mode, ); - self.node.add_layer(sequencer_l1_gas_layer); + self.node.add_layer(gas_adjuster_layer); + Ok(self) + } + + fn add_l1_gas_layer(mut self) -> anyhow::Result { + // Ensure the BaseTokenRatioProviderResource is inserted if the base token is not ETH. + if self.contracts_config.base_token_addr != Some(SHARED_BRIDGE_ETHER_TOKEN_ADDRESS) { + let base_token_adjuster_config = try_load_config!(self.configs.base_token_adjuster); + self.node + .add_layer(BaseTokenRatioProviderLayer::new(base_token_adjuster_config)); + } + let state_keeper_config = try_load_config!(self.configs.state_keeper_config); + let l1_gas_layer = L1GasLayer::new(state_keeper_config); + self.node.add_layer(l1_gas_layer); Ok(self) } @@ -614,7 +619,7 @@ impl MainNodeBuilder { .add_healthcheck_layer()? .add_prometheus_exporter_layer()? .add_query_eth_client_layer()? - .add_sequencer_l1_gas_layer()?; + .add_gas_adjuster_layer()?; // Add preconditions for all the components. self = self @@ -637,11 +642,13 @@ impl MainNodeBuilder { // State keeper is the core component of the sequencer, // which is why we consider it to be responsible for the storage initialization. self = self + .add_l1_gas_layer()? .add_storage_initialization_layer(LayerKind::Task)? .add_state_keeper_layer()?; } Component::HttpApi => { self = self + .add_l1_gas_layer()? .add_tx_sender_layer()? .add_tree_api_client_layer()? .add_api_caches_layer()? @@ -649,6 +656,7 @@ impl MainNodeBuilder { } Component::WsApi => { self = self + .add_l1_gas_layer()? .add_tx_sender_layer()? .add_tree_api_client_layer()? .add_api_caches_layer()? 
diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs index e979c372d8e8..b5f8ee423138 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs @@ -7,7 +7,7 @@ use crate::{ implementations::resources::{ circuit_breakers::CircuitBreakersResource, eth_interface::{BoundEthInterfaceForBlobsResource, BoundEthInterfaceResource}, - l1_tx_params::L1TxParamsResource, + gas_adjuster::GasAdjusterResource, pools::{MasterPool, PoolResource, ReplicaPool}, }, service::StopReceiver, @@ -45,7 +45,7 @@ pub struct Input { pub replica_pool: PoolResource, pub eth_client: BoundEthInterfaceResource, pub eth_client_blobs: Option, - pub l1_tx_params: L1TxParamsResource, + pub gas_adjuster: GasAdjusterResource, #[context(default)] pub circuit_breakers: CircuitBreakersResource, } @@ -82,7 +82,7 @@ impl WiringLayer for EthTxManagerLayer { let config = self.eth_sender_config.sender.context("sender")?; - let gas_adjuster = input.l1_tx_params.0; + let gas_adjuster = input.gas_adjuster.0; let eth_tx_manager = EthTxManager::new( master_pool, diff --git a/core/node/node_framework/src/implementations/layers/gas_adjuster.rs b/core/node/node_framework/src/implementations/layers/gas_adjuster.rs new file mode 100644 index 000000000000..71a4e56bbbae --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/gas_adjuster.rs @@ -0,0 +1,107 @@ +use std::sync::Arc; + +use anyhow::Context; +use zksync_config::{configs::eth_sender::PubdataSendingMode, GasAdjusterConfig, GenesisConfig}; +use zksync_node_fee_model::l1_gas_price::GasAdjuster; + +use crate::{ + implementations::resources::{ + eth_interface::EthInterfaceResource, gas_adjuster::GasAdjusterResource, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for sequencer L1 gas interfaces. +/// Adds several resources that depend on L1 gas price. +#[derive(Debug)] +pub struct GasAdjusterLayer { + gas_adjuster_config: GasAdjusterConfig, + genesis_config: GenesisConfig, + pubdata_sending_mode: PubdataSendingMode, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub eth_client: EthInterfaceResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + pub gas_adjuster: GasAdjusterResource, + /// Only runs if someone uses the resources listed above. 
+ #[context(task)] + pub gas_adjuster_task: GasAdjusterTask, +} + +impl GasAdjusterLayer { + pub fn new( + gas_adjuster_config: GasAdjusterConfig, + genesis_config: GenesisConfig, + pubdata_sending_mode: PubdataSendingMode, + ) -> Self { + Self { + gas_adjuster_config, + genesis_config, + pubdata_sending_mode, + } + } +} + +#[async_trait::async_trait] +impl WiringLayer for GasAdjusterLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "gas_adjuster_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let client = input.eth_client.0; + let adjuster = GasAdjuster::new( + client, + self.gas_adjuster_config, + self.pubdata_sending_mode, + self.genesis_config.l1_batch_commit_data_generator_mode, + ) + .await + .context("GasAdjuster::new()")?; + let gas_adjuster = Arc::new(adjuster); + + Ok(Output { + gas_adjuster: gas_adjuster.clone().into(), + gas_adjuster_task: GasAdjusterTask { gas_adjuster }, + }) + } +} + +#[derive(Debug)] +pub struct GasAdjusterTask { + gas_adjuster: Arc, +} + +#[async_trait::async_trait] +impl Task for GasAdjusterTask { + fn id(&self) -> TaskId { + "gas_adjuster".into() + } + + async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { + // Gas adjuster layer is added to provide a resource for anyone to use, but it comes with + // a support task. If nobody has used the resource, we don't need to run the support task. + if Arc::strong_count(&self.gas_adjuster) == 1 { + tracing::info!( + "Gas adjuster is not used by any other task, not running the support task" + ); + stop_receiver.0.changed().await?; + return Ok(()); + } + + self.gas_adjuster.run(stop_receiver.0).await + } +} diff --git a/core/node/node_framework/src/implementations/layers/l1_gas.rs b/core/node/node_framework/src/implementations/layers/l1_gas.rs index 85e0422cdcb1..9a4ccb8264f6 100644 --- a/core/node/node_framework/src/implementations/layers/l1_gas.rs +++ b/core/node/node_framework/src/implementations/layers/l1_gas.rs @@ -1,39 +1,33 @@ use std::sync::Arc; -use anyhow::Context; -use zksync_config::{ - configs::{chain::StateKeeperConfig, eth_sender::PubdataSendingMode}, - GasAdjusterConfig, GenesisConfig, -}; -use zksync_node_fee_model::{l1_gas_price::GasAdjuster, MainNodeFeeInputProvider}; +use zksync_config::configs::chain::StateKeeperConfig; +use zksync_node_fee_model::{ApiFeeInputProvider, MainNodeFeeInputProvider}; use zksync_types::fee_model::FeeModelConfig; use crate::{ implementations::resources::{ base_token_ratio_provider::BaseTokenRatioProviderResource, - eth_interface::EthInterfaceResource, fee_input::FeeInputResource, + fee_input::{ApiFeeInputResource, SequencerFeeInputResource}, + gas_adjuster::GasAdjusterResource, l1_tx_params::L1TxParamsResource, + pools::{PoolResource, ReplicaPool}, }, - service::StopReceiver, - task::{Task, TaskId}, wiring_layer::{WiringError, WiringLayer}, FromContext, IntoContext, }; -/// Wiring layer for sequencer L1 gas interfaces. +/// Wiring layer for L1 gas interfaces. /// Adds several resources that depend on L1 gas price. 
#[derive(Debug)] -pub struct SequencerL1GasLayer { - gas_adjuster_config: GasAdjusterConfig, - genesis_config: GenesisConfig, - pubdata_sending_mode: PubdataSendingMode, +pub struct L1GasLayer { state_keeper_config: StateKeeperConfig, } #[derive(Debug, FromContext)] #[context(crate = crate)] pub struct Input { - pub eth_client: EthInterfaceResource, + pub gas_adjuster: GasAdjusterResource, + pub replica_pool: PoolResource, /// If not provided, the base token assumed to be ETH, and the ratio will be constant. #[context(default)] pub base_token_ratio_provider: BaseTokenRatioProviderResource, @@ -42,87 +36,47 @@ pub struct Input { #[derive(Debug, IntoContext)] #[context(crate = crate)] pub struct Output { - pub fee_input: FeeInputResource, + pub sequencer_fee_input: SequencerFeeInputResource, + pub api_fee_input: ApiFeeInputResource, pub l1_tx_params: L1TxParamsResource, - /// Only runs if someone uses the resources listed above. - #[context(task)] - pub gas_adjuster_task: GasAdjusterTask, } -impl SequencerL1GasLayer { - pub fn new( - gas_adjuster_config: GasAdjusterConfig, - genesis_config: GenesisConfig, - state_keeper_config: StateKeeperConfig, - pubdata_sending_mode: PubdataSendingMode, - ) -> Self { +impl L1GasLayer { + pub fn new(state_keeper_config: StateKeeperConfig) -> Self { Self { - gas_adjuster_config, - genesis_config, - pubdata_sending_mode, state_keeper_config, } } } #[async_trait::async_trait] -impl WiringLayer for SequencerL1GasLayer { +impl WiringLayer for L1GasLayer { type Input = Input; type Output = Output; fn layer_name(&self) -> &'static str { - "sequencer_l1_gas_layer" + "l1_gas_layer" } async fn wire(self, input: Self::Input) -> Result { - let client = input.eth_client.0; - let adjuster = GasAdjuster::new( - client, - self.gas_adjuster_config, - self.pubdata_sending_mode, - self.genesis_config.l1_batch_commit_data_generator_mode, - ) - .await - .context("GasAdjuster::new()")?; - let gas_adjuster = Arc::new(adjuster); - let ratio_provider = input.base_token_ratio_provider; - let batch_fee_input_provider = Arc::new(MainNodeFeeInputProvider::new( - gas_adjuster.clone(), - ratio_provider.0.clone(), + let main_fee_input_provider = Arc::new(MainNodeFeeInputProvider::new( + input.gas_adjuster.0.clone(), + ratio_provider.0, FeeModelConfig::from_state_keeper_config(&self.state_keeper_config), )); - Ok(Output { - fee_input: batch_fee_input_provider.into(), - l1_tx_params: gas_adjuster.clone().into(), - gas_adjuster_task: GasAdjusterTask { gas_adjuster }, - }) - } -} - -#[derive(Debug)] -pub struct GasAdjusterTask { - gas_adjuster: Arc, -} - -#[async_trait::async_trait] -impl Task for GasAdjusterTask { - fn id(&self) -> TaskId { - "gas_adjuster".into() - } - async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { - // Gas adjuster layer is added to provide a resource for anyone to use, but it comes with - // a support task. If nobody has used the resource, we don't need to run the support task. 
- if Arc::strong_count(&self.gas_adjuster) == 1 { - tracing::info!( - "Gas adjuster is not used by any other task, not running the support task" - ); - stop_receiver.0.changed().await?; - return Ok(()); - } + let replica_pool = input.replica_pool.get().await?; + let api_fee_input_provider = Arc::new(ApiFeeInputProvider::new( + main_fee_input_provider.clone(), + replica_pool, + )); - self.gas_adjuster.run(stop_receiver.0).await + Ok(Output { + sequencer_fee_input: main_fee_input_provider.into(), + api_fee_input: api_fee_input_provider.into(), + l1_tx_params: input.gas_adjuster.0.into(), + }) } } diff --git a/core/node/node_framework/src/implementations/layers/main_node_fee_params_fetcher.rs b/core/node/node_framework/src/implementations/layers/main_node_fee_params_fetcher.rs index 848dd4464387..6e7df3b47e42 100644 --- a/core/node/node_framework/src/implementations/layers/main_node_fee_params_fetcher.rs +++ b/core/node/node_framework/src/implementations/layers/main_node_fee_params_fetcher.rs @@ -4,7 +4,8 @@ use zksync_node_fee_model::l1_gas_price::MainNodeFeeParamsFetcher; use crate::{ implementations::resources::{ - fee_input::FeeInputResource, main_node_client::MainNodeClientResource, + fee_input::{ApiFeeInputResource, SequencerFeeInputResource}, + main_node_client::MainNodeClientResource, }, service::StopReceiver, task::{Task, TaskId}, @@ -26,7 +27,8 @@ pub struct Input { #[derive(Debug, IntoContext)] #[context(crate = crate)] pub struct Output { - pub fee_input: FeeInputResource, + pub sequencer_fee_input: SequencerFeeInputResource, + pub api_fee_input: ApiFeeInputResource, #[context(task)] pub fetcher: MainNodeFeeParamsFetcherTask, } @@ -44,7 +46,8 @@ impl WiringLayer for MainNodeFeeParamsFetcherLayer { let MainNodeClientResource(main_node_client) = input.main_node_client; let fetcher = Arc::new(MainNodeFeeParamsFetcher::new(main_node_client)); Ok(Output { - fee_input: fetcher.clone().into(), + sequencer_fee_input: fetcher.clone().into(), + api_fee_input: fetcher.clone().into(), fetcher: MainNodeFeeParamsFetcherTask { fetcher }, }) } diff --git a/core/node/node_framework/src/implementations/layers/mod.rs b/core/node/node_framework/src/implementations/layers/mod.rs index 55bc0a40ca73..99e50be8ae5d 100644 --- a/core/node/node_framework/src/implementations/layers/mod.rs +++ b/core/node/node_framework/src/implementations/layers/mod.rs @@ -9,6 +9,7 @@ pub mod contract_verification_api; pub mod da_dispatcher; pub mod eth_sender; pub mod eth_watch; +pub mod gas_adjuster; pub mod healtcheck_server; pub mod house_keeper; pub mod l1_batch_commitment_mode_validation; diff --git a/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs b/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs index 6be6544ee3df..ec2c415b9bbd 100644 --- a/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs +++ b/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs @@ -8,7 +8,7 @@ use zksync_types::L2ChainId; use crate::{ implementations::resources::{ - fee_input::FeeInputResource, + fee_input::SequencerFeeInputResource, pools::{MasterPool, PoolResource}, state_keeper::{ConditionalSealerResource, StateKeeperIOResource}, }, @@ -44,7 +44,7 @@ pub struct MempoolIOLayer { #[derive(Debug, FromContext)] #[context(crate = crate)] pub struct Input { - pub fee_input: FeeInputResource, + pub fee_input: SequencerFeeInputResource, pub master_pool: PoolResource, } diff --git 
a/core/node/node_framework/src/implementations/layers/web3_api/tx_sender.rs b/core/node/node_framework/src/implementations/layers/web3_api/tx_sender.rs index 4ece9b024300..3574b8e8c24c 100644 --- a/core/node/node_framework/src/implementations/layers/web3_api/tx_sender.rs +++ b/core/node/node_framework/src/implementations/layers/web3_api/tx_sender.rs @@ -15,7 +15,7 @@ use zksync_web3_decl::{ use crate::{ implementations::resources::{ - fee_input::FeeInputResource, + fee_input::ApiFeeInputResource, main_node_client::MainNodeClientResource, pools::{PoolResource, ReplicaPool}, state_keeper::ConditionalSealerResource, @@ -67,7 +67,7 @@ pub struct TxSenderLayer { pub struct Input { pub tx_sink: TxSinkResource, pub replica_pool: PoolResource, - pub fee_input: FeeInputResource, + pub fee_input: ApiFeeInputResource, pub main_node_client: Option, pub sealer: Option, } diff --git a/core/node/node_framework/src/implementations/resources/fee_input.rs b/core/node/node_framework/src/implementations/resources/fee_input.rs index 10271977bac7..1553df1ee5bd 100644 --- a/core/node/node_framework/src/implementations/resources/fee_input.rs +++ b/core/node/node_framework/src/implementations/resources/fee_input.rs @@ -4,17 +4,33 @@ use zksync_node_fee_model::BatchFeeModelInputProvider; use crate::resource::Resource; -/// A resource that provides [`BatchFeeModelInputProvider`] implementation to the service. +/// A resource that provides [`BatchFeeModelInputProvider`] implementation to the service and is used by sequencer. #[derive(Debug, Clone)] -pub struct FeeInputResource(pub Arc); +pub struct SequencerFeeInputResource(pub Arc); -impl Resource for FeeInputResource { +impl Resource for SequencerFeeInputResource { fn name() -> String { - "common/fee_input".into() + "common/sequencer_fee_input".into() } } -impl From> for FeeInputResource { +impl From> for SequencerFeeInputResource { + fn from(provider: Arc) -> Self { + Self(provider) + } +} + +/// A resource that provides [`BatchFeeModelInputProvider`] implementation to the service and is used by API. +#[derive(Debug, Clone)] +pub struct ApiFeeInputResource(pub Arc); + +impl Resource for ApiFeeInputResource { + fn name() -> String { + "common/api_fee_input".into() + } +} + +impl From> for ApiFeeInputResource { fn from(provider: Arc) -> Self { Self(provider) } diff --git a/core/node/node_framework/src/implementations/resources/gas_adjuster.rs b/core/node/node_framework/src/implementations/resources/gas_adjuster.rs new file mode 100644 index 000000000000..ff135c715cc3 --- /dev/null +++ b/core/node/node_framework/src/implementations/resources/gas_adjuster.rs @@ -0,0 +1,21 @@ +use std::sync::Arc; + +use zksync_node_fee_model::l1_gas_price::GasAdjuster; + +use crate::resource::Resource; + +/// A resource that provides [`GasAdjuster`] to the service. 
+#[derive(Debug, Clone)] +pub struct GasAdjusterResource(pub Arc); + +impl Resource for GasAdjusterResource { + fn name() -> String { + "common/gas_adjuster".into() + } +} + +impl From> for GasAdjusterResource { + fn from(gas_adjuster: Arc) -> Self { + Self(gas_adjuster) + } +} diff --git a/core/node/node_framework/src/implementations/resources/mod.rs b/core/node/node_framework/src/implementations/resources/mod.rs index 4f82f4c3a911..e7f581c77093 100644 --- a/core/node/node_framework/src/implementations/resources/mod.rs +++ b/core/node/node_framework/src/implementations/resources/mod.rs @@ -4,6 +4,7 @@ pub mod circuit_breakers; pub mod da_client; pub mod eth_interface; pub mod fee_input; +pub mod gas_adjuster; pub mod healthcheck; pub mod l1_tx_params; pub mod main_node_client; From e23e6611731835ef3abd34f3f9867f9dc533eb21 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 8 Aug 2024 17:11:35 +0400 Subject: [PATCH 5/8] feat(vlog): Report observability config, flush, and shutdown (#2622) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Adds more logs for observability lifecycle. ## Why ❔ Easier to debug issues. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- Cargo.lock | 1 + core/lib/config/Cargo.toml | 3 ++- core/lib/config/src/observability_ext.rs | 3 +++ core/lib/vlog/src/lib.rs | 8 ++++++++ prover/Cargo.lock | 1 + 5 files changed, 15 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 8446831eca71..85ea14290a13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8157,6 +8157,7 @@ dependencies = [ "rand 0.8.5", "secrecy", "serde", + "tracing", "url", "zksync_basic_types", "zksync_concurrency", diff --git a/core/lib/config/Cargo.toml b/core/lib/config/Cargo.toml index 784bdebfef07..b13948448cdd 100644 --- a/core/lib/config/Cargo.toml +++ b/core/lib/config/Cargo.toml @@ -16,6 +16,7 @@ zksync_crypto_primitives.workspace = true zksync_consensus_utils.workspace = true zksync_concurrency.workspace = true zksync_vlog = { workspace = true, optional = true } +tracing = { workspace = true, optional = true } url.workspace = true anyhow.workspace = true @@ -25,4 +26,4 @@ serde = { workspace = true, features = ["derive"] } [features] default = [] -observability_ext = ["zksync_vlog"] +observability_ext = ["zksync_vlog", "tracing"] diff --git a/core/lib/config/src/observability_ext.rs b/core/lib/config/src/observability_ext.rs index 641b095eb3b9..1370978b5807 100644 --- a/core/lib/config/src/observability_ext.rs +++ b/core/lib/config/src/observability_ext.rs @@ -16,6 +16,9 @@ impl ObservabilityConfig { .with_sentry(sentry) .with_opentelemetry(opentelemetry) .build(); + + tracing::info!("Installed observability stack with the following configuration: {self:?}"); + Ok(guard) } } diff --git a/core/lib/vlog/src/lib.rs b/core/lib/vlog/src/lib.rs index 390b7782c584..268fbd0b39eb 100644 --- a/core/lib/vlog/src/lib.rs +++ b/core/lib/vlog/src/lib.rs @@ -42,6 +42,7 @@ impl ObservabilityGuard { if let Some(sentry_guard) = &self.sentry_guard { sentry_guard.flush(Some(FLUSH_TIMEOUT)); + tracing::info!("Sentry events are flushed"); } if let Some(provider) = &self.otlp_tracing_provider { @@ -50,6 +51,7 @@ impl ObservabilityGuard { tracing::warn!("Flushing the spans failed: {err:?}"); } } + tracing::info!("Spans are flushed"); } 
if let Some(provider) = &self.otlp_logging_provider { @@ -58,6 +60,7 @@ impl ObservabilityGuard { tracing::warn!("Flushing the logs failed: {err:?}"); } } + tracing::info!("Logs are flushed"); } } @@ -70,15 +73,20 @@ impl ObservabilityGuard { // `take` here and below ensures that we don't have any access to the deinitialized resources. if let Some(sentry_guard) = self.sentry_guard.take() { sentry_guard.close(Some(SHUTDOWN_TIMEOUT)); + tracing::info!("Sentry client is shut down"); } if let Some(provider) = self.otlp_tracing_provider.take() { if let Err(err) = provider.shutdown() { tracing::warn!("Shutting down the OTLP tracing provider failed: {err:?}"); + } else { + tracing::info!("OTLP tracing provider is shut down"); } } if let Some(provider) = self.otlp_logging_provider.take() { if let Err(err) = provider.shutdown() { tracing::warn!("Shutting down the OTLP logs provider failed: {err:?}"); + } else { + tracing::info!("OTLP logs provider is shut down"); } } } diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a7400fc2490e..4284ee998940 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -7736,6 +7736,7 @@ dependencies = [ "rand 0.8.5", "secrecy", "serde", + "tracing", "url", "zksync_basic_types", "zksync_concurrency", From 129a1819262d64a36d651af01fdab93c5ff91712 Mon Sep 17 00:00:00 2001 From: Artem Fomiuk <88630083+Artemka374@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:21:29 +0300 Subject: [PATCH 6/8] feat: External prover API (#2538) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Adds a prover API component, which allows getting proof generation data and verifying final proofs against the ones generated by our prover subsystem. ## Why ❔ First step of prover decentralization. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`.
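Below is a minimal sketch of how the new component is expected to be enabled (not part of the patch: the `--components` flag and the import path are assumptions based on how other optional components are selected, while the component name and the port variable come from the diffs that follow):

```rust
use std::str::FromStr;
use zksync_core_leftovers::Components; // import path assumed

fn main() {
    // Passing this name in the server's component list (e.g. via `--components`)
    // selects `Component::ExternalProofIntegrationApi`; parsing is handled by
    // the `FromStr` impl extended later in this patch.
    let _components = Components::from_str("external_proof_integration_api")
        .expect("known component name");

    // The listening port is read from EXTERNAL_PROOF_INTEGRATION_API_HTTP_PORT
    // (the env-config test below uses 3320); the node builder then wires
    // `ExternalProofIntegrationApiLayer::new(config, commitment_mode)`.
}
```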
--------- Co-authored-by: EmilLuta --- Cargo.lock | 17 ++ Cargo.toml | 3 +- core/bin/zksync_server/src/main.rs | 5 +- core/bin/zksync_server/src/node_builder.rs | 14 ++ .../configs/external_proof_integration_api.rs | 6 + core/lib/config/src/configs/general.rs | 5 +- core/lib/config/src/configs/mod.rs | 2 + core/lib/config/src/lib.rs | 4 +- core/lib/config/src/testonly.rs | 14 ++ ...fd52908e6748be50524871ff40e3301fecab1.json | 20 ++ core/lib/dal/src/proof_generation_dal.rs | 23 +++ .../src/external_proof_integration_api.rs | 35 ++++ core/lib/env_config/src/lib.rs | 1 + .../src/external_proof_integration_api.rs | 22 ++ core/lib/protobuf_config/src/general.rs | 7 + core/lib/protobuf_config/src/lib.rs | 1 + .../external_proof_integration_api.proto | 7 + .../src/proto/config/general.proto | 62 +++--- core/lib/prover_interface/src/api.rs | 8 +- core/lib/zksync_core_leftovers/src/lib.rs | 5 + .../src/temp_config_store/mod.rs | 9 +- .../external_proof_integration_api/Cargo.toml | 23 +++ .../src/error.rs | 86 ++++++++ .../external_proof_integration_api/src/lib.rs | 73 +++++++ .../src/processor.rs | 190 ++++++++++++++++++ core/node/node_framework/Cargo.toml | 1 + .../layers/external_proof_integration_api.rs | 100 +++++++++ .../src/implementations/layers/mod.rs | 1 + .../base/external_proof_integration_api.toml | 2 + etc/env/file_based/general.yaml | 9 +- 30 files changed, 712 insertions(+), 43 deletions(-) create mode 100644 core/lib/config/src/configs/external_proof_integration_api.rs create mode 100644 core/lib/dal/.sqlx/query-782726c01284ded3905c312262afd52908e6748be50524871ff40e3301fecab1.json create mode 100644 core/lib/env_config/src/external_proof_integration_api.rs create mode 100644 core/lib/protobuf_config/src/external_proof_integration_api.rs create mode 100644 core/lib/protobuf_config/src/proto/config/external_proof_integration_api.proto create mode 100644 core/node/external_proof_integration_api/Cargo.toml create mode 100644 core/node/external_proof_integration_api/src/error.rs create mode 100644 core/node/external_proof_integration_api/src/lib.rs create mode 100644 core/node/external_proof_integration_api/src/processor.rs create mode 100644 core/node/node_framework/src/implementations/layers/external_proof_integration_api.rs create mode 100644 etc/env/base/external_proof_integration_api.toml diff --git a/Cargo.lock b/Cargo.lock index 85ea14290a13..88b5204d317a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8725,6 +8725,22 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zksync_external_proof_integration_api" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum", + "bincode", + "tokio", + "tracing", + "zksync_basic_types", + "zksync_config", + "zksync_dal", + "zksync_object_store", + "zksync_prover_interface", +] + [[package]] name = "zksync_health_check" version = "0.1.0" @@ -9074,6 +9090,7 @@ dependencies = [ "zksync_eth_sender", "zksync_eth_watch", "zksync_external_price_api", + "zksync_external_proof_integration_api", "zksync_health_check", "zksync_house_keeper", "zksync_metadata_calculator", diff --git a/Cargo.toml b/Cargo.toml index 691ce1c4c3ff..20e24bff044e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,6 +36,7 @@ members = [ "core/node/api_server", "core/node/tee_verifier_input_producer", "core/node/base_token_adjuster", + "core/node/external_proof_integration_api", # Libraries "core/lib/db_connection", "core/lib/zksync_core_leftovers", @@ -77,7 +78,6 @@ members = [ "core/tests/loadnext", "core/tests/vm-benchmark", "core/tests/vm-benchmark/harness", - # Parts of 
prover workspace that are needed for Core workspace "prover/crates/lib/prover_dal", ] @@ -282,6 +282,7 @@ zksync_eth_sender = { version = "0.1.0", path = "core/node/eth_sender" } zksync_node_db_pruner = { version = "0.1.0", path = "core/node/db_pruner" } zksync_node_fee_model = { version = "0.1.0", path = "core/node/fee_model" } zksync_vm_runner = { version = "0.1.0", path = "core/node/vm_runner" } +zksync_external_proof_integration_api = { version = "0.1.0", path = "core/node/external_proof_integration_api" } zksync_node_test_utils = { version = "0.1.0", path = "core/node/test_utils" } zksync_state_keeper = { version = "0.1.0", path = "core/node/state_keeper" } zksync_reorg_detector = { version = "0.1.0", path = "core/node/reorg_detector" } diff --git a/core/bin/zksync_server/src/main.rs b/core/bin/zksync_server/src/main.rs index a7fee61c8a01..b1d522171fe1 100644 --- a/core/bin/zksync_server/src/main.rs +++ b/core/bin/zksync_server/src/main.rs @@ -18,8 +18,8 @@ use zksync_config::{ ProtectiveReadsWriterConfig, Secrets, }, ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DADispatcherConfig, DBConfig, - EthConfig, EthWatchConfig, GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, PostgresConfig, - SnapshotsCreatorConfig, + EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, GenesisConfig, + ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; use zksync_core_leftovers::{ temp_config_store::{decode_yaml_repr, TempConfigStore}, @@ -208,5 +208,6 @@ fn load_env_config() -> anyhow::Result { pruning: None, snapshot_recovery: None, external_price_api_client_config: ExternalPriceApiClientConfig::from_env().ok(), + external_proof_integration_api_config: ExternalProofIntegrationApiConfig::from_env().ok(), }) } diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index 7b83587a29eb..648050a08ebc 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -31,6 +31,7 @@ use zksync_node_framework::{ da_dispatcher::DataAvailabilityDispatcherLayer, eth_sender::{EthTxAggregatorLayer, EthTxManagerLayer}, eth_watch::EthWatchLayer, + external_proof_integration_api::ExternalProofIntegrationApiLayer, gas_adjuster::GasAdjusterLayer, healtcheck_server::HealthCheckLayer, house_keeper::HouseKeeperLayer, @@ -574,6 +575,16 @@ impl MainNodeBuilder { Ok(self) } + fn add_external_proof_integration_api_layer(mut self) -> anyhow::Result { + let config = try_load_config!(self.configs.external_proof_integration_api_config); + self.node.add_layer(ExternalProofIntegrationApiLayer::new( + config, + self.genesis_config.l1_batch_commit_data_generator_mode, + )); + + Ok(self) + } + /// This layer will make sure that the database is initialized correctly, /// e.g. genesis will be performed if it's required. 
/// @@ -718,6 +729,9 @@ impl MainNodeBuilder { Component::VmRunnerBwip => { self = self.add_vm_runner_bwip_layer()?; } + Component::ExternalProofIntegrationApi => { + self = self.add_external_proof_integration_api_layer()?; + } } } Ok(self.node.build()) diff --git a/core/lib/config/src/configs/external_proof_integration_api.rs b/core/lib/config/src/configs/external_proof_integration_api.rs new file mode 100644 index 000000000000..f9a43995ad17 --- /dev/null +++ b/core/lib/config/src/configs/external_proof_integration_api.rs @@ -0,0 +1,6 @@ +use serde::Deserialize; + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ExternalProofIntegrationApiConfig { + pub http_port: u16, +} diff --git a/core/lib/config/src/configs/general.rs b/core/lib/config/src/configs/general.rs index 5707b5c70492..91106a7ca1d4 100644 --- a/core/lib/config/src/configs/general.rs +++ b/core/lib/config/src/configs/general.rs @@ -14,8 +14,8 @@ use crate::{ FriWitnessVectorGeneratorConfig, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig, }, - ApiConfig, ContractVerifierConfig, DBConfig, EthConfig, ObjectStoreConfig, PostgresConfig, - SnapshotsCreatorConfig, + ApiConfig, ContractVerifierConfig, DBConfig, EthConfig, ExternalProofIntegrationApiConfig, + ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; #[derive(Debug, Clone, PartialEq)] @@ -50,4 +50,5 @@ pub struct GeneralConfig { pub base_token_adjuster: Option, pub external_price_api_client_config: Option, pub consensus_config: Option, + pub external_proof_integration_api_config: Option, } diff --git a/core/lib/config/src/configs/mod.rs b/core/lib/config/src/configs/mod.rs index 0da6f986f353..eb6c26dbe944 100644 --- a/core/lib/config/src/configs/mod.rs +++ b/core/lib/config/src/configs/mod.rs @@ -11,6 +11,7 @@ pub use self::{ eth_watch::EthWatchConfig, experimental::ExperimentalDBConfig, external_price_api_client::ExternalPriceApiClientConfig, + external_proof_integration_api::ExternalProofIntegrationApiConfig, fri_proof_compressor::FriProofCompressorConfig, fri_prover::FriProverConfig, fri_prover_gateway::FriProverGatewayConfig, @@ -43,6 +44,7 @@ pub mod eth_sender; pub mod eth_watch; mod experimental; pub mod external_price_api_client; +pub mod external_proof_integration_api; pub mod fri_proof_compressor; pub mod fri_prover; pub mod fri_prover_gateway; diff --git a/core/lib/config/src/lib.rs b/core/lib/config/src/lib.rs index c5944e581a97..ae8288fa72ea 100644 --- a/core/lib/config/src/lib.rs +++ b/core/lib/config/src/lib.rs @@ -2,8 +2,8 @@ pub use crate::configs::{ ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, ContractsConfig, - DADispatcherConfig, DBConfig, EthConfig, EthWatchConfig, GasAdjusterConfig, GenesisConfig, - ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, + DADispatcherConfig, DBConfig, EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, + GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; pub mod configs; diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index f31ec9a0acab..2d6f02e1f6c5 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -995,6 +995,19 @@ impl Distribution for Enc } } +impl Distribution + for EncodeDist +{ + fn sample( + &self, + rng: &mut R, + ) -> configs::external_proof_integration_api::ExternalProofIntegrationApiConfig { + configs::external_proof_integration_api::ExternalProofIntegrationApiConfig { + http_port: self.sample(rng), + } + } +} + impl Distribution for 
EncodeDist { fn sample( &self, @@ -1044,6 +1057,7 @@ impl Distribution for EncodeDist { base_token_adjuster: self.sample(rng), external_price_api_client_config: self.sample(rng), consensus_config: self.sample(rng), + external_proof_integration_api_config: self.sample(rng), } } } diff --git a/core/lib/dal/.sqlx/query-782726c01284ded3905c312262afd52908e6748be50524871ff40e3301fecab1.json b/core/lib/dal/.sqlx/query-782726c01284ded3905c312262afd52908e6748be50524871ff40e3301fecab1.json new file mode 100644 index 000000000000..2be235312c77 --- /dev/null +++ b/core/lib/dal/.sqlx/query-782726c01284ded3905c312262afd52908e6748be50524871ff40e3301fecab1.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number\n FROM\n proof_generation_details\n WHERE\n proof_blob_url IS NOT NULL\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "782726c01284ded3905c312262afd52908e6748be50524871ff40e3301fecab1" +} diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index 4e37cc644f8e..808f4c6f48a0 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -88,6 +88,29 @@ impl ProofGenerationDal<'_, '_> { Ok(result) } + pub async fn get_available_batch(&mut self) -> DalResult { + let result = sqlx::query!( + r#" + SELECT + l1_batch_number + FROM + proof_generation_details + WHERE + proof_blob_url IS NOT NULL + ORDER BY + l1_batch_number ASC + LIMIT + 1 + "#, + ) + .instrument("get_available batch") + .fetch_one(self.storage) + .await? + .l1_batch_number as u32; + + Ok(L1BatchNumber(result)) + } + /// Marks a previously locked batch as 'unpicked', allowing it to be picked without having /// to wait for the processing timeout. 
pub async fn unlock_batch(&mut self, l1_batch_number: L1BatchNumber) -> DalResult<()> { diff --git a/core/lib/env_config/src/external_proof_integration_api.rs b/core/lib/env_config/src/external_proof_integration_api.rs new file mode 100644 index 000000000000..dddca93eb0ec --- /dev/null +++ b/core/lib/env_config/src/external_proof_integration_api.rs @@ -0,0 +1,35 @@ +use zksync_config::configs::ExternalProofIntegrationApiConfig; + +use crate::{envy_load, FromEnv}; + +impl FromEnv for ExternalProofIntegrationApiConfig { + fn from_env() -> anyhow::Result { + envy_load( + "external_proof_integration_api", + "EXTERNAL_PROOF_INTEGRATION_API_", + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::EnvMutex; + + static MUTEX: EnvMutex = EnvMutex::new(); + + fn expected_config() -> ExternalProofIntegrationApiConfig { + ExternalProofIntegrationApiConfig { http_port: 3320 } + } + + #[test] + fn from_env() { + let config = r#" + EXTERNAL_PROOF_INTEGRATION_API_HTTP_PORT="3320" + "#; + let mut lock = MUTEX.lock(); + lock.set_env(config); + let actual = ExternalProofIntegrationApiConfig::from_env().unwrap(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/env_config/src/lib.rs b/core/lib/env_config/src/lib.rs index 789f6f8be2fd..fcb0f3625ea1 100644 --- a/core/lib/env_config/src/lib.rs +++ b/core/lib/env_config/src/lib.rs @@ -24,6 +24,7 @@ mod utils; mod base_token_adjuster; mod da_dispatcher; mod external_price_api_client; +mod external_proof_integration_api; mod genesis; #[cfg(test)] mod test_utils; diff --git a/core/lib/protobuf_config/src/external_proof_integration_api.rs b/core/lib/protobuf_config/src/external_proof_integration_api.rs new file mode 100644 index 000000000000..e824df50dfc6 --- /dev/null +++ b/core/lib/protobuf_config/src/external_proof_integration_api.rs @@ -0,0 +1,22 @@ +use anyhow::Context; +use zksync_config::ExternalProofIntegrationApiConfig; +use zksync_protobuf::{required, ProtoRepr}; + +use crate::proto::external_proof_integration_api as proto; + +impl ProtoRepr for proto::ExternalProofIntegrationApi { + type Type = ExternalProofIntegrationApiConfig; + fn read(&self) -> anyhow::Result { + Ok(Self::Type { + http_port: required(&self.http_port) + .and_then(|p| Ok((*p).try_into()?)) + .context("http_port")?, + }) + } + + fn build(this: &Self::Type) -> Self { + Self { + http_port: Some(this.http_port.into()), + } + } +} diff --git a/core/lib/protobuf_config/src/general.rs b/core/lib/protobuf_config/src/general.rs index 367458f7aa25..88da18997608 100644 --- a/core/lib/protobuf_config/src/general.rs +++ b/core/lib/protobuf_config/src/general.rs @@ -40,6 +40,9 @@ impl ProtoRepr for proto::GeneralConfig { snapshot_recovery: read_optional_repr(&self.snapshot_recovery), external_price_api_client_config: read_optional_repr(&self.external_price_api_client), consensus_config: read_optional_repr(&self.consensus), + external_proof_integration_api_config: read_optional_repr( + &self.external_proof_integration_api, + ), }) } @@ -90,6 +93,10 @@ impl ProtoRepr for proto::GeneralConfig { .as_ref() .map(ProtoRepr::build), consensus: this.consensus_config.as_ref().map(ProtoRepr::build), + external_proof_integration_api: this + .external_proof_integration_api_config + .as_ref() + .map(ProtoRepr::build), } } } diff --git a/core/lib/protobuf_config/src/lib.rs b/core/lib/protobuf_config/src/lib.rs index d7a4a4e570ad..ee526b2bb67f 100644 --- a/core/lib/protobuf_config/src/lib.rs +++ b/core/lib/protobuf_config/src/lib.rs @@ -30,6 +30,7 @@ mod secrets; mod 
snapshots_creator; mod external_price_api_client; +mod external_proof_integration_api; mod snapshot_recovery; #[cfg(test)] mod tests; diff --git a/core/lib/protobuf_config/src/proto/config/external_proof_integration_api.proto b/core/lib/protobuf_config/src/proto/config/external_proof_integration_api.proto new file mode 100644 index 000000000000..07203202c9d6 --- /dev/null +++ b/core/lib/protobuf_config/src/proto/config/external_proof_integration_api.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package zksync.config.external_proof_integration_api; + +message ExternalProofIntegrationApi { + optional uint32 http_port = 1; +} diff --git a/core/lib/protobuf_config/src/proto/config/general.proto b/core/lib/protobuf_config/src/proto/config/general.proto index 37d507b9ab62..cb4629f2d85f 100644 --- a/core/lib/protobuf_config/src/proto/config/general.proto +++ b/core/lib/protobuf_config/src/proto/config/general.proto @@ -21,37 +21,39 @@ import "zksync/config/pruning.proto"; import "zksync/config/object_store.proto"; import "zksync/config/base_token_adjuster.proto"; import "zksync/config/external_price_api_client.proto"; +import "zksync/config/external_proof_integration_api.proto"; import "zksync/core/consensus.proto"; message GeneralConfig { - optional database.Postgres postgres = 1; - optional api.Api api = 2; - optional contract_verifier.ContractVerifier contract_verifier = 3; - optional circuit_breaker.CircuitBreaker circuit_breaker = 5; - optional chain.Mempool mempool = 6; - optional chain.OperationsManager operations_manager = 8; - optional chain.StateKeeper state_keeper = 9; - optional house_keeper.HouseKeeper house_keeper = 10; - optional prover.Prover prover = 12; - optional utils.Prometheus prometheus = 15; - optional database.DB db = 20; - optional eth.ETH eth = 22; - optional prover.WitnessGenerator witness_generator = 24; - optional prover.WitnessVectorGenerator witness_vector_generator = 25; - optional prover.ProofCompressor proof_compressor = 27; - optional prover.ProofDataHandler data_handler = 28; - optional prover.ProverGroup prover_group = 29; - optional prover.ProverGateway prover_gateway = 30; - optional snapshot_creator.SnapshotsCreator snapshot_creator = 31; - optional observability.Observability observability = 32; - optional vm_runner.ProtectiveReadsWriter protective_reads_writer = 33; - optional object_store.ObjectStore core_object_store = 34; - optional snapshot_recovery.SnapshotRecovery snapshot_recovery = 35; - optional pruning.Pruning pruning = 36; - optional commitment_generator.CommitmentGenerator commitment_generator = 37; - optional da_dispatcher.DataAvailabilityDispatcher da_dispatcher = 38; - optional base_token_adjuster.BaseTokenAdjuster base_token_adjuster = 39; - optional vm_runner.BasicWitnessInputProducer basic_witness_input_producer = 40; - optional external_price_api_client.ExternalPriceApiClient external_price_api_client = 41; - optional core.consensus.Config consensus = 42; + optional database.Postgres postgres = 1; + optional api.Api api = 2; + optional contract_verifier.ContractVerifier contract_verifier = 3; + optional circuit_breaker.CircuitBreaker circuit_breaker = 5; + optional chain.Mempool mempool = 6; + optional chain.OperationsManager operations_manager = 8; + optional chain.StateKeeper state_keeper = 9; + optional house_keeper.HouseKeeper house_keeper = 10; + optional prover.Prover prover = 12; + optional utils.Prometheus prometheus = 15; + optional database.DB db = 20; + optional eth.ETH eth = 22; + optional prover.WitnessGenerator 
witness_generator = 24; + optional prover.WitnessVectorGenerator witness_vector_generator = 25; + optional prover.ProofCompressor proof_compressor = 27; + optional prover.ProofDataHandler data_handler = 28; + optional prover.ProverGroup prover_group = 29; + optional prover.ProverGateway prover_gateway = 30; + optional snapshot_creator.SnapshotsCreator snapshot_creator = 31; + optional observability.Observability observability = 32; + optional vm_runner.ProtectiveReadsWriter protective_reads_writer = 33; + optional object_store.ObjectStore core_object_store = 34; + optional snapshot_recovery.SnapshotRecovery snapshot_recovery = 35; + optional pruning.Pruning pruning = 36; + optional commitment_generator.CommitmentGenerator commitment_generator = 37; + optional da_dispatcher.DataAvailabilityDispatcher da_dispatcher = 38; + optional base_token_adjuster.BaseTokenAdjuster base_token_adjuster = 39; + optional vm_runner.BasicWitnessInputProducer basic_witness_input_producer = 40; + optional external_price_api_client.ExternalPriceApiClient external_price_api_client = 41; + optional core.consensus.Config consensus = 42; + optional external_proof_integration_api.ExternalProofIntegrationApi external_proof_integration_api = 43; } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 00ac85a40739..e0b617ee3bea 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -14,7 +14,7 @@ use crate::{ // Structs for holding data returned in HTTP responses -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ProofGenerationData { pub l1_batch_number: L1BatchNumber, pub witness_input_data: WitnessInputData, @@ -61,6 +61,12 @@ pub enum SubmitProofRequest { SkippedProofGeneration, } +#[derive(Debug, Serialize, Deserialize)] +pub struct OptionalProofGenerationDataRequest(pub Option); + +#[derive(Debug, Serialize, Deserialize)] +pub struct VerifyProofRequest(pub Box); + #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct SubmitTeeProofRequest(pub Box); diff --git a/core/lib/zksync_core_leftovers/src/lib.rs b/core/lib/zksync_core_leftovers/src/lib.rs index b79b86d718d0..c443fb9e8e07 100644 --- a/core/lib/zksync_core_leftovers/src/lib.rs +++ b/core/lib/zksync_core_leftovers/src/lib.rs @@ -62,6 +62,8 @@ pub enum Component { BaseTokenRatioPersister, /// VM runner-based component that saves VM execution data for basic witness generation. 
VmRunnerBwip, + /// External prover API that is used to retrieve data for proving and verifies final proofs against ones, generated by us + ExternalProofIntegrationApi, } #[derive(Debug)] @@ -106,6 +108,9 @@ impl FromStr for Components { Ok(Components(vec![Component::BaseTokenRatioPersister])) } "vm_runner_bwip" => Ok(Components(vec![Component::VmRunnerBwip])), + "external_proof_integration_api" => { + Ok(Components(vec![Component::ExternalProofIntegrationApi])) + } other => Err(format!("{} is not a valid component name", other)), } } diff --git a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs index 1ad688ed14cb..6d2a06605be0 100644 --- a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs +++ b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs @@ -19,8 +19,8 @@ use zksync_config::{ PruningConfig, SnapshotRecoveryConfig, }, ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DADispatcherConfig, DBConfig, - EthConfig, EthWatchConfig, GasAdjusterConfig, ObjectStoreConfig, PostgresConfig, - SnapshotsCreatorConfig, + EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, + ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; use zksync_env_config::FromEnv; use zksync_protobuf::repr::ProtoRepr; @@ -77,6 +77,7 @@ pub struct TempConfigStore { pub pruning: Option, pub snapshot_recovery: Option, pub external_price_api_client_config: Option, + pub external_proof_integration_api_config: Option, } impl TempConfigStore { @@ -112,6 +113,9 @@ impl TempConfigStore { pruning: self.pruning.clone(), external_price_api_client_config: self.external_price_api_client_config.clone(), consensus_config: None, + external_proof_integration_api_config: self + .external_proof_integration_api_config + .clone(), } } @@ -183,6 +187,7 @@ fn load_env_config() -> anyhow::Result { pruning: None, snapshot_recovery: None, external_price_api_client_config: ExternalPriceApiClientConfig::from_env().ok(), + external_proof_integration_api_config: ExternalProofIntegrationApiConfig::from_env().ok(), }) } diff --git a/core/node/external_proof_integration_api/Cargo.toml b/core/node/external_proof_integration_api/Cargo.toml new file mode 100644 index 000000000000..ae7cd4c4d031 --- /dev/null +++ b/core/node/external_proof_integration_api/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "zksync_external_proof_integration_api" +description = "ZKsync external proof integration API" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +axum.workspace = true +tracing.workspace = true +zksync_prover_interface.workspace = true +zksync_basic_types.workspace = true +zksync_config.workspace = true +zksync_object_store.workspace = true +zksync_dal.workspace = true +tokio.workspace = true +bincode.workspace = true +anyhow.workspace = true diff --git a/core/node/external_proof_integration_api/src/error.rs b/core/node/external_proof_integration_api/src/error.rs new file mode 100644 index 000000000000..dac8e2a27ed6 --- /dev/null +++ b/core/node/external_proof_integration_api/src/error.rs @@ -0,0 +1,86 @@ +use axum::{ + http::StatusCode, + response::{IntoResponse, Response}, +}; +use zksync_basic_types::L1BatchNumber; +use zksync_dal::DalError; +use zksync_object_store::ObjectStoreError; + +pub(crate) enum ProcessorError { + 
ObjectStore(ObjectStoreError), + Dal(DalError), + Serialization(bincode::Error), + InvalidProof, + BatchNotReady(L1BatchNumber), +} + +impl From for ProcessorError { + fn from(err: ObjectStoreError) -> Self { + Self::ObjectStore(err) + } +} + +impl From for ProcessorError { + fn from(err: DalError) -> Self { + Self::Dal(err) + } +} + +impl From for ProcessorError { + fn from(err: bincode::Error) -> Self { + Self::Serialization(err) + } +} + +impl IntoResponse for ProcessorError { + fn into_response(self) -> Response { + let (status_code, message) = match self { + ProcessorError::ObjectStore(err) => { + tracing::error!("GCS error: {:?}", err); + match err { + ObjectStoreError::KeyNotFound(_) => ( + StatusCode::NOT_FOUND, + "Proof verification not possible anymore, batch is too old.".to_owned(), + ), + _ => ( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed fetching from GCS".to_owned(), + ), + } + } + ProcessorError::Dal(err) => { + tracing::error!("Sqlx error: {:?}", err); + match err.inner() { + zksync_dal::SqlxError::RowNotFound => { + (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) + } + _ => ( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed fetching/saving from db".to_owned(), + ), + } + } + ProcessorError::Serialization(err) => { + tracing::error!("Serialization error: {:?}", err); + ( + StatusCode::BAD_REQUEST, + "Failed to deserialize proof data".to_owned(), + ) + } + ProcessorError::BatchNotReady(l1_batch_number) => { + tracing::error!( + "Batch {l1_batch_number:?} is not yet ready for proving. Most likely our proof for this batch is not generated yet" + ); + ( + StatusCode::INTERNAL_SERVER_ERROR, + format!("Batch {l1_batch_number:?} is not yet ready for proving. Most likely our proof for this batch is not generated yet, try again later"), + ) + } + ProcessorError::InvalidProof => { + tracing::error!("Invalid proof data"); + (StatusCode::BAD_REQUEST, "Invalid proof data".to_owned()) + } + }; + (status_code, message).into_response() + } +} diff --git a/core/node/external_proof_integration_api/src/lib.rs b/core/node/external_proof_integration_api/src/lib.rs new file mode 100644 index 000000000000..51fecf8c23fc --- /dev/null +++ b/core/node/external_proof_integration_api/src/lib.rs @@ -0,0 +1,73 @@ +mod error; +mod processor; + +use std::{net::SocketAddr, sync::Arc}; + +use anyhow::Context; +use axum::{extract::Path, routing::post, Json, Router}; +use tokio::sync::watch; +use zksync_basic_types::commitment::L1BatchCommitmentMode; +use zksync_config::configs::external_proof_integration_api::ExternalProofIntegrationApiConfig; +use zksync_dal::{ConnectionPool, Core}; +use zksync_object_store::ObjectStore; +use zksync_prover_interface::api::{OptionalProofGenerationDataRequest, VerifyProofRequest}; + +use crate::processor::Processor; + +pub async fn run_server( + config: ExternalProofIntegrationApiConfig, + blob_store: Arc, + connection_pool: ConnectionPool, + commitment_mode: L1BatchCommitmentMode, + mut stop_receiver: watch::Receiver, +) -> anyhow::Result<()> { + let bind_address = SocketAddr::from(([0, 0, 0, 0], config.http_port)); + tracing::debug!("Starting external prover API server on {bind_address}"); + let app = create_router(blob_store, connection_pool, commitment_mode).await; + + let listener = tokio::net::TcpListener::bind(bind_address) + .await + .with_context(|| format!("Failed binding external prover API server to {bind_address}"))?; + axum::serve(listener, app) + .with_graceful_shutdown(async move { + if stop_receiver.changed().await.is_err() { + 
tracing::warn!("Stop signal sender for external prover API server was dropped without sending a signal"); + } + tracing::info!("Stop signal received, external prover API server is shutting down"); + }) + .await + .context("External prover API server failed")?; + tracing::info!("External prover API server shut down"); + Ok(()) +} + +async fn create_router( + blob_store: Arc, + connection_pool: ConnectionPool, + commitment_mode: L1BatchCommitmentMode, +) -> Router { + let mut processor = + Processor::new(blob_store.clone(), connection_pool.clone(), commitment_mode); + let verify_proof_processor = processor.clone(); + Router::new() + .route( + "/proof_generation_data", + post( + // we use post method because the returned data is not idempotent, + // i.e we return different result on each call. + move |payload: Json| async move { + processor.get_proof_generation_data(payload).await + }, + ), + ) + .route( + "/verify_proof/:l1_batch_number", + post( + move |l1_batch_number: Path, payload: Json| async move { + verify_proof_processor + .verify_proof(l1_batch_number, payload) + .await + }, + ), + ) +} diff --git a/core/node/external_proof_integration_api/src/processor.rs b/core/node/external_proof_integration_api/src/processor.rs new file mode 100644 index 000000000000..a15e45e48037 --- /dev/null +++ b/core/node/external_proof_integration_api/src/processor.rs @@ -0,0 +1,190 @@ +use std::sync::Arc; + +use axum::{extract::Path, Json}; +use zksync_basic_types::{ + basic_fri_types::Eip4844Blobs, commitment::L1BatchCommitmentMode, L1BatchNumber, +}; +use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_object_store::{bincode, ObjectStore}; +use zksync_prover_interface::{ + api::{ + OptionalProofGenerationDataRequest, ProofGenerationData, ProofGenerationDataResponse, + VerifyProofRequest, + }, + inputs::{ + L1BatchMetadataHashes, VMRunWitnessInputData, WitnessInputData, WitnessInputMerklePaths, + }, + outputs::L1BatchProofForL1, +}; + +use crate::error::ProcessorError; + +#[derive(Clone)] +pub(crate) struct Processor { + blob_store: Arc, + pool: ConnectionPool, + commitment_mode: L1BatchCommitmentMode, +} + +impl Processor { + pub(crate) fn new( + blob_store: Arc, + pool: ConnectionPool, + commitment_mode: L1BatchCommitmentMode, + ) -> Self { + Self { + blob_store, + pool, + commitment_mode, + } + } + + #[tracing::instrument(skip_all)] + pub(crate) async fn get_proof_generation_data( + &mut self, + request: Json, + ) -> Result, ProcessorError> { + tracing::info!("Received request for proof generation data: {:?}", request); + + let latest_available_batch = self + .pool + .connection() + .await + .unwrap() + .proof_generation_dal() + .get_available_batch() + .await?; + + let l1_batch_number = if let Some(l1_batch_number) = request.0 .0 { + if l1_batch_number > latest_available_batch { + tracing::error!( + "Requested batch is not available: {:?}, latest available batch is {:?}", + l1_batch_number, + latest_available_batch + ); + return Err(ProcessorError::BatchNotReady(l1_batch_number)); + } + l1_batch_number + } else { + latest_available_batch + }; + + let proof_generation_data = self + .proof_generation_data_for_existing_batch(l1_batch_number) + .await; + + match proof_generation_data { + Ok(data) => Ok(Json(ProofGenerationDataResponse::Success(Some(Box::new( + data, + ))))), + Err(err) => Err(err), + } + } + + #[tracing::instrument(skip(self))] + async fn proof_generation_data_for_existing_batch( + &self, + l1_batch_number: L1BatchNumber, + ) -> Result { + let vm_run_data: VMRunWitnessInputData 
= self + .blob_store + .get(l1_batch_number) + .await + .map_err(ProcessorError::ObjectStore)?; + let merkle_paths: WitnessInputMerklePaths = self + .blob_store + .get(l1_batch_number) + .await + .map_err(ProcessorError::ObjectStore)?; + + // Acquire connection after interacting with GCP, to avoid holding the connection for too long. + let mut conn = self.pool.connection().await.map_err(ProcessorError::Dal)?; + + let previous_batch_metadata = conn + .blocks_dal() + .get_l1_batch_metadata(L1BatchNumber(l1_batch_number.checked_sub(1).unwrap())) + .await + .map_err(ProcessorError::Dal)? + .expect("No metadata for previous batch"); + + let header = conn + .blocks_dal() + .get_l1_batch_header(l1_batch_number) + .await + .map_err(ProcessorError::Dal)? + .unwrap_or_else(|| panic!("Missing header for {}", l1_batch_number)); + + let minor_version = header.protocol_version.unwrap(); + let protocol_version = conn + .protocol_versions_dal() + .get_protocol_version_with_latest_patch(minor_version) + .await + .map_err(ProcessorError::Dal)? + .unwrap_or_else(|| { + panic!("Missing l1 verifier info for protocol version {minor_version}") + }); + + let batch_header = conn + .blocks_dal() + .get_l1_batch_header(l1_batch_number) + .await + .map_err(ProcessorError::Dal)? + .unwrap_or_else(|| panic!("Missing header for {}", l1_batch_number)); + + let eip_4844_blobs = match self.commitment_mode { + L1BatchCommitmentMode::Validium => Eip4844Blobs::empty(), + L1BatchCommitmentMode::Rollup => { + let blobs = batch_header.pubdata_input.as_deref().unwrap_or_else(|| { + panic!( + "expected pubdata, but it is not available for batch {l1_batch_number:?}" + ) + }); + Eip4844Blobs::decode(blobs).expect("failed to decode EIP-4844 blobs") + } + }; + + let blob = WitnessInputData { + vm_run_data, + merkle_paths, + eip_4844_blobs, + previous_batch_metadata: L1BatchMetadataHashes { + root_hash: previous_batch_metadata.metadata.root_hash, + meta_hash: previous_batch_metadata.metadata.meta_parameters_hash, + aux_hash: previous_batch_metadata.metadata.aux_data_hash, + }, + }; + + Ok(ProofGenerationData { + l1_batch_number, + witness_input_data: blob, + protocol_version: protocol_version.version, + l1_verifier_config: protocol_version.l1_verifier_config, + }) + } + + pub(crate) async fn verify_proof( + &self, + Path(l1_batch_number): Path, + Json(payload): Json, + ) -> Result<(), ProcessorError> { + let l1_batch_number = L1BatchNumber(l1_batch_number); + tracing::info!( + "Received request to verify proof for batch: {:?}", + l1_batch_number + ); + + let serialized_proof = bincode::serialize(&payload.0)?; + let expected_proof = bincode::serialize( + &self + .blob_store + .get::((l1_batch_number, payload.0.protocol_version)) + .await?, + )?; + + if serialized_proof != expected_proof { + return Err(ProcessorError::InvalidProof); + } + + Ok(()) + } +} diff --git a/core/node/node_framework/Cargo.toml b/core/node/node_framework/Cargo.toml index 640000c6a7d8..142d6cfa11ab 100644 --- a/core/node/node_framework/Cargo.toml +++ b/core/node/node_framework/Cargo.toml @@ -53,6 +53,7 @@ zksync_node_db_pruner.workspace = true zksync_base_token_adjuster.workspace = true zksync_node_storage_init.workspace = true zksync_external_price_api.workspace = true +zksync_external_proof_integration_api.workspace = true pin-project-lite.workspace = true tracing.workspace = true diff --git a/core/node/node_framework/src/implementations/layers/external_proof_integration_api.rs 
b/core/node/node_framework/src/implementations/layers/external_proof_integration_api.rs new file mode 100644 index 000000000000..7877bc6abbe3 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/external_proof_integration_api.rs @@ -0,0 +1,100 @@ +use std::sync::Arc; + +use zksync_config::configs::external_proof_integration_api::ExternalProofIntegrationApiConfig; +use zksync_dal::{ConnectionPool, Core}; +use zksync_object_store::ObjectStore; +use zksync_types::commitment::L1BatchCommitmentMode; + +use crate::{ + implementations::resources::{ + object_store::ObjectStoreResource, + pools::{PoolResource, ReplicaPool}, + }, + service::StopReceiver, + task::{Task, TaskId}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Wiring layer for proof data handler server. +#[derive(Debug)] +pub struct ExternalProofIntegrationApiLayer { + external_proof_integration_api_config: ExternalProofIntegrationApiConfig, + commitment_mode: L1BatchCommitmentMode, +} + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub master_pool: PoolResource, + pub object_store: ObjectStoreResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub task: ProverApiTask, +} + +impl ExternalProofIntegrationApiLayer { + pub fn new( + external_proof_integration_api_config: ExternalProofIntegrationApiConfig, + commitment_mode: L1BatchCommitmentMode, + ) -> Self { + Self { + external_proof_integration_api_config, + commitment_mode, + } + } +} + +#[async_trait::async_trait] +impl WiringLayer for ExternalProofIntegrationApiLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "external_proof_integration_api_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let main_pool = input.master_pool.get().await.unwrap(); + let blob_store = input.object_store.0; + + let task = ProverApiTask { + external_proof_integration_api_config: self.external_proof_integration_api_config, + blob_store, + main_pool, + commitment_mode: self.commitment_mode, + }; + + Ok(Output { task }) + } +} + +#[derive(Debug)] +pub struct ProverApiTask { + external_proof_integration_api_config: ExternalProofIntegrationApiConfig, + blob_store: Arc, + main_pool: ConnectionPool, + commitment_mode: L1BatchCommitmentMode, +} + +#[async_trait::async_trait] +impl Task for ProverApiTask { + fn id(&self) -> TaskId { + "external_proof_integration_api".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + zksync_external_proof_integration_api::run_server( + self.external_proof_integration_api_config, + self.blob_store, + self.main_pool, + self.commitment_mode, + stop_receiver.0, + ) + .await + } +} diff --git a/core/node/node_framework/src/implementations/layers/mod.rs b/core/node/node_framework/src/implementations/layers/mod.rs index 99e50be8ae5d..6256f2d61043 100644 --- a/core/node/node_framework/src/implementations/layers/mod.rs +++ b/core/node/node_framework/src/implementations/layers/mod.rs @@ -9,6 +9,7 @@ pub mod contract_verification_api; pub mod da_dispatcher; pub mod eth_sender; pub mod eth_watch; +pub mod external_proof_integration_api; pub mod gas_adjuster; pub mod healtcheck_server; pub mod house_keeper; diff --git a/etc/env/base/external_proof_integration_api.toml b/etc/env/base/external_proof_integration_api.toml new file mode 100644 index 000000000000..5918a061be3a --- /dev/null +++ 
b/etc/env/base/external_proof_integration_api.toml @@ -0,0 +1,2 @@ +[external_proof_integration_api] +http_port = 3073 diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 300138e9a867..97d29c45b0f7 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -41,7 +41,7 @@ api: estimate_gas_scale_factor: 1.3 estimate_gas_acceptable_overestimation: 5000 max_tx_size: 1000000 - api_namespaces: [en,eth,net,web3,zks,pubsub,debug] + api_namespaces: [ en,eth,net,web3,zks,pubsub,debug ] state_keeper: transaction_slots: 8192 max_allowed_l2_tx_gas_limit: 15000000000 @@ -104,7 +104,7 @@ eth: aggregated_block_execute_deadline: 10 timestamp_criteria_max_allowed_lag: 30 max_eth_tx_data_size: 120000 - aggregated_proof_sizes: [1] + aggregated_proof_sizes: [ 1 ] max_aggregated_tx_gas: 15000000 max_acceptable_priority_fee_in_gwei: 100000000000 pubdata_sending_mode: BLOBS @@ -302,7 +302,7 @@ prometheus: observability: log_format: plain - log_directives: "zksync_node_test_utils=info,zksync_state_keeper=info,zksync_reorg_detector=info,zksync_consistency_checker=info,zksync_metadata_calculator=info,zksync_node_sync=info,zksync_node_consensus=info,zksync_contract_verification_server=info,zksync_node_api_server=info,zksync_tee_verifier_input_producer=info,zksync_node_framework=info,zksync_block_reverter=info,zksync_commitment_generator=info,zksync_node_db_pruner=info,zksync_eth_sender=info,zksync_node_fee_model=info,zksync_node_genesis=info,zksync_house_keeper=info,zksync_proof_data_handler=info,zksync_shared_metrics=info,zksync_node_test_utils=info,zksync_vm_runner=info,zksync_consensus_bft=info,zksync_consensus_network=info,zksync_consensus_storage=info,zksync_core_leftovers=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=info,zksync_db_connection=info,zksync_eth_client=info,zksync_eth_watch=debug,zksync_storage=info,zksync_db_manager=info,zksync_merkle_tree=info,zksync_state=debug,zksync_utils=debug,zksync_queued_job_processor=info,zksync_types=info,zksync_mempool=debug,loadnext=info,vm=info,zksync_object_store=info,zksync_external_node=info,zksync_witness_generator=info,zksync_prover_fri=info,zksync_witness_vector_generator=info,zksync_web3_decl=debug,zksync_health_check=debug,zksync_proof_fri_compressor=info,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug" + log_directives: 
"zksync_node_test_utils=info,zksync_state_keeper=info,zksync_reorg_detector=info,zksync_consistency_checker=info,zksync_metadata_calculator=info,zksync_node_sync=info,zksync_node_consensus=info,zksync_contract_verification_server=info,zksync_node_api_server=info,zksync_tee_verifier_input_producer=info,zksync_node_framework=info,zksync_block_reverter=info,zksync_commitment_generator=info,zksync_node_db_pruner=info,zksync_eth_sender=info,zksync_node_fee_model=info,zksync_node_genesis=info,zksync_house_keeper=info,zksync_proof_data_handler=info,zksync_shared_metrics=info,zksync_node_test_utils=info,zksync_vm_runner=info,zksync_consensus_bft=info,zksync_consensus_network=info,zksync_consensus_storage=info,zksync_core_leftovers=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=info,zksync_db_connection=info,zksync_eth_client=info,zksync_eth_watch=debug,zksync_storage=info,zksync_db_manager=info,zksync_merkle_tree=info,zksync_state=debug,zksync_utils=debug,zksync_queued_job_processor=info,zksync_types=info,zksync_mempool=debug,loadnext=info,vm=info,zksync_object_store=info,zksync_external_node=info,zksync_witness_generator=info,zksync_prover_fri=info,zksync_witness_vector_generator=info,zksync_web3_decl=debug,zksync_health_check=debug,zksync_proof_fri_compressor=info,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug,zksync_external_proof_integration_api=info" # Uncomment only if needed # sentry: # url: unset @@ -354,3 +354,6 @@ da_dispatcher: polling_interval_ms: 5000 max_rows_to_dispatch: 100 max_retries: 5 + +external_proof_integration_api: + http_port: 3073 From 1ac52c5211983f4affbf04dbe23d354a5b4b67eb Mon Sep 17 00:00:00 2001 From: zksync-era-bot <147085853+zksync-era-bot@users.noreply.github.com> Date: Thu, 8 Aug 2024 17:13:38 +0300 Subject: [PATCH 7/8] chore(main): release core 24.16.0 (#2608) :robot: I have created a release *beep* *boop* --- ## [24.16.0](https://github.com/matter-labs/zksync-era/compare/core-v24.15.0...core-v24.16.0) (2024-08-08) ### Features * External prover API ([#2538](https://github.com/matter-labs/zksync-era/issues/2538)) ([129a181](https://github.com/matter-labs/zksync-era/commit/129a1819262d64a36d651af01fdab93c5ff91712)) * **node-framework:** Add API fee params resource ([#2621](https://github.com/matter-labs/zksync-era/issues/2621)) ([aff7b65](https://github.com/matter-labs/zksync-era/commit/aff7b6535ef92aaced0dd7fa1cc08d656cba027e)) * **vlog:** Expose more resource values via opentelemetry ([#2620](https://github.com/matter-labs/zksync-era/issues/2620)) ([7ae07e4](https://github.com/matter-labs/zksync-era/commit/7ae07e446c9732a896ca8246d324e82c6e6d5a46)) * **vlog:** Report observability config, flush, and shutdown ([#2622](https://github.com/matter-labs/zksync-era/issues/2622)) ([e23e661](https://github.com/matter-labs/zksync-era/commit/e23e6611731835ef3abd34f3f9867f9dc533eb21)) ### Bug Fixes * Bump prover dependencies & rust toolchain ([#2600](https://github.com/matter-labs/zksync-era/issues/2600)) ([849c6a5](https://github.com/matter-labs/zksync-era/commit/849c6a5dcd095e8fead0630a2a403f282c26a2aa)) * **en:** Initialize SyncState in OutputHandler ([#2618](https://github.com/matter-labs/zksync-era/issues/2618)) ([f0c8506](https://github.com/matter-labs/zksync-era/commit/f0c85062fac96180c2e0dec52086714ee3783fcf)) * restrictive genesis parsing ([#2605](https://github.com/matter-labs/zksync-era/issues/2605)) 
([d5f8f38](https://github.com/matter-labs/zksync-era/commit/d5f8f3892a14180f590cabab921d3a68dec903e3)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --------- Co-authored-by: zksync-era-bot --- .github/release-please/manifest.json | 2 +- Cargo.lock | 2 +- core/CHANGELOG.md | 17 +++++++++++++++++ core/bin/external_node/Cargo.toml | 2 +- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index 86dfd338a8cd..1daa63226906 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,5 +1,5 @@ { - "core": "24.15.0", + "core": "24.16.0", "prover": "16.3.0", "zk_toolbox": "0.1.1" } diff --git a/Cargo.lock b/Cargo.lock index 88b5204d317a..c352625b943c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8652,7 +8652,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "24.15.0" +version = "24.16.0" dependencies = [ "anyhow", "assert_matches", diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 8488606a4058..2632d997c21d 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [24.16.0](https://github.com/matter-labs/zksync-era/compare/core-v24.15.0...core-v24.16.0) (2024-08-08) + + +### Features + +* External prover API ([#2538](https://github.com/matter-labs/zksync-era/issues/2538)) ([129a181](https://github.com/matter-labs/zksync-era/commit/129a1819262d64a36d651af01fdab93c5ff91712)) +* **node-framework:** Add API fee params resource ([#2621](https://github.com/matter-labs/zksync-era/issues/2621)) ([aff7b65](https://github.com/matter-labs/zksync-era/commit/aff7b6535ef92aaced0dd7fa1cc08d656cba027e)) +* **vlog:** Expose more resource values via opentelemetry ([#2620](https://github.com/matter-labs/zksync-era/issues/2620)) ([7ae07e4](https://github.com/matter-labs/zksync-era/commit/7ae07e446c9732a896ca8246d324e82c6e6d5a46)) +* **vlog:** Report observability config, flush, and shutdown ([#2622](https://github.com/matter-labs/zksync-era/issues/2622)) ([e23e661](https://github.com/matter-labs/zksync-era/commit/e23e6611731835ef3abd34f3f9867f9dc533eb21)) + + +### Bug Fixes + +* Bump prover dependencies & rust toolchain ([#2600](https://github.com/matter-labs/zksync-era/issues/2600)) ([849c6a5](https://github.com/matter-labs/zksync-era/commit/849c6a5dcd095e8fead0630a2a403f282c26a2aa)) +* **en:** Initialize SyncState in OutputHandler ([#2618](https://github.com/matter-labs/zksync-era/issues/2618)) ([f0c8506](https://github.com/matter-labs/zksync-era/commit/f0c85062fac96180c2e0dec52086714ee3783fcf)) +* restrictive genesis parsing ([#2605](https://github.com/matter-labs/zksync-era/issues/2605)) ([d5f8f38](https://github.com/matter-labs/zksync-era/commit/d5f8f3892a14180f590cabab921d3a68dec903e3)) + ## [24.15.0](https://github.com/matter-labs/zksync-era/compare/core-v24.14.0...core-v24.15.0) (2024-08-07) diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 96a09a2ba2a9..4a3a4f14a556 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "24.15.0" # x-release-please-version +version = "24.16.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true From 
d40ff5f3aa41801c054d0557f9aea11715af9c31 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 8 Aug 2024 16:59:57 +0200 Subject: [PATCH 8/8] feat(tee): introduce get_tee_proofs RPC method for TEE proofs (#2474) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Add `get_tee_proofs` RPC method to expose TEE attestations and proofs for a given batch number and TEE type. Currently, there can be only one proof instance per batch number per TEE type. In the future, we may allow multiple instances since attestations can be revoked if the machine that produced them is compromised. ## Why ❔ We want to enable anyone to download TEE attestations and proofs for any transaction, allowing them to verify it on their own machines using the `verify-attestation` standalone binary (available at https://github.com/matter-labs/teepot/tree/main/bin/verify-attestation). This is just an intermediate step; the ultimate goal is to have TEE proofs and attestations verification implemented on-chain. ## Checklist - [x] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [x] Code has been formatted via `zk fmt` and `zk lint`. --- core/bin/zksync_tee_prover/src/api_client.rs | 7 +- core/bin/zksync_tee_prover/src/tee_prover.rs | 2 +- ...93c6f5a43386d4183745a4668507cf672b3f6.json | 37 ++++ ...a2fca14965083b0589c3b3efad02e37d55f0c.json | 20 -- ...b608d21dc70397b64ce500881a8b55953c59c.json | 14 -- ...213a0b02b3ff96398920bc0250397bb2a95f.json} | 6 +- ...7a1e1abf3d268b16498cc65758af66781cbb6.json | 15 ++ ...dc118360ab83bae3196ec941216901be629da.json | 35 ++++ ...148b0f1c7e512dd43434062341eb263fe434f.json | 22 -- core/lib/dal/doc/TeeProofGenerationDal.md | 19 ++ .../20240805144000_tee_proofs_reorg.down.sql | 5 + .../20240805144000_tee_proofs_reorg.up.sql | 7 + core/lib/dal/src/models/mod.rs | 1 + core/lib/dal/src/models/storage_tee_proof.rs | 10 + core/lib/dal/src/tee_proof_generation_dal.rs | 194 +++++++++++------- core/lib/prover_interface/src/api.rs | 6 +- core/lib/types/src/api/mod.rs | 13 ++ core/lib/web3_decl/src/namespaces/unstable.rs | 13 +- .../backend_jsonrpsee/namespaces/unstable.rs | 16 +- .../src/web3/namespaces/unstable.rs | 31 ++- .../src/tee_request_processor.rs | 15 +- core/node/proof_data_handler/src/tests.rs | 26 ++- .../tee_verifier_input_producer/src/lib.rs | 4 +- 23 files changed, 367 insertions(+), 151 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json delete mode 100644 core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json delete mode 100644 core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json rename core/lib/dal/.sqlx/{query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json => query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json} (58%) create mode 100644 core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json create mode 100644 core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json delete mode 100644 core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json create mode 100644 core/lib/dal/doc/TeeProofGenerationDal.md create mode 100644 core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql 
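As a rough illustration of the `get_tee_proofs` method described above, here is a minimal, hypothetical client-side sketch (not part of this patch) of querying it over JSON-RPC. It assumes the method is exposed under the unstable namespace as `unstable_getTeeProofs`, that the node's HTTP RPC endpoint is `http://localhost:3050`, and that `tokio`, `reqwest` (with its `json` feature), `serde_json`, and `anyhow` are available; the `"Sgx"` spelling follows the serde representation used in the proof data handler tests below.

```rust
// Hypothetical sketch: fetch TEE proofs for L1 batch 1234, filtered to SGX.
use serde_json::{json, Value};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "unstable_getTeeProofs",
        // Positional params: the L1 batch number and an optional TEE type filter.
        "params": [1234, "Sgx"],
    });

    let response: Value = reqwest::Client::new()
        .post("http://localhost:3050")
        .json(&request)
        .send()
        .await?
        .json()
        .await?;

    // Each element of `result` is a `TeeProof` object with `l1BatchNumber`, `teeType`,
    // `pubkey`, `signature`, `proof`, `provedAt` and `attestation` fields (camelCase,
    // per the serde attribute on the `TeeProof` struct added below).
    println!("{response:#}");
    Ok(())
}
```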
create mode 100644 core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql create mode 100644 core/lib/dal/src/models/storage_tee_proof.rs diff --git a/core/bin/zksync_tee_prover/src/api_client.rs b/core/bin/zksync_tee_prover/src/api_client.rs index 1530da971157..13fbc1ba8868 100644 --- a/core/bin/zksync_tee_prover/src/api_client.rs +++ b/core/bin/zksync_tee_prover/src/api_client.rs @@ -74,8 +74,11 @@ impl TeeApiClient { /// Fetches the next job for the TEE prover to process, verifying and signing it if the /// verification is successful. - pub async fn get_job(&self) -> Result>, TeeProverError> { - let request = TeeProofGenerationDataRequest {}; + pub async fn get_job( + &self, + tee_type: TeeType, + ) -> Result>, TeeProverError> { + let request = TeeProofGenerationDataRequest { tee_type }; let response = self .post::<_, TeeProofGenerationDataResponse, _>("/tee/proof_inputs", request) .await?; diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs index bcd1e4a1b6b4..64a3a9c5749d 100644 --- a/core/bin/zksync_tee_prover/src/tee_prover.rs +++ b/core/bin/zksync_tee_prover/src/tee_prover.rs @@ -112,7 +112,7 @@ impl TeeProver { } async fn step(&self) -> Result, TeeProverError> { - match self.api_client.get_job().await? { + match self.api_client.get_job(self.tee_type).await? { Some(job) => { let (signature, batch_number, root_hash) = self.verify(*job)?; self.api_client diff --git a/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json new file mode 100644 index 000000000000..540660bddf34 --- /dev/null +++ b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n tee_type = $1\n AND l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $2\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $3::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + }, + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6" +} diff --git a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json b/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json deleted file mode 100644 index f0603488f1e8..000000000000 --- a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN 
tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c" -} diff --git a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json b/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json deleted file mode 100644 index 994bfcfbb5a2..000000000000 --- a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c" -} diff --git a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json similarity index 58% rename from core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json rename to core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json index 8e210aade885..8b67041427d3 100644 --- a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json +++ b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json @@ -1,18 +1,18 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n proof = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", + "query": "\n UPDATE tee_proof_generation_details\n SET\n tee_type = $1,\n status = 'generated',\n pubkey = $2,\n signature = $3,\n proof = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", "describe": { "columns": [], "parameters": { "Left": [ + "Text", "Bytea", "Bytea", "Bytea", - "Text", "Int8" ] }, "nullable": [] }, - "hash": "727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711" + "hash": "a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f" } diff --git a/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json new file mode 100644 index 000000000000..0ed8005289f7 --- /dev/null +++ b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number, tee_type) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": 
"d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6" +} diff --git a/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json new file mode 100644 index 000000000000..70f7f9d12fa4 --- /dev/null +++ b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json @@ -0,0 +1,35 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $1\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da" +} diff --git a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json b/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json deleted file mode 100644 index 4236e72fccad..000000000000 --- a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Interval" - ] - }, - "nullable": [ - false - ] - }, - "hash": "e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f" -} diff --git a/core/lib/dal/doc/TeeProofGenerationDal.md b/core/lib/dal/doc/TeeProofGenerationDal.md new file mode 100644 index 000000000000..23474d5cb5c5 --- /dev/null +++ b/core/lib/dal/doc/TeeProofGenerationDal.md @@ -0,0 +1,19 @@ +# TeeProofGenerationDal + +## Table Name + +`tee_proofs` + +## `status` Diagram + +```mermaid +--- +title: Status Diagram +--- +stateDiagram-v2 +[*] --> ready_to_be_proven : insert_tee_proof_generation_job +ready_to_be_proven --> picked_by_prover : get_next_batch_to_be_proven +picked_by_prover --> generated : save_proof_artifacts_metadata +generated --> [*] + +``` diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql new file mode 100644 index 000000000000..09a162f31fbf --- /dev/null +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql @@ -0,0 +1,5 @@ +ALTER 
TABLE tee_verifier_input_producer_jobs ADD COLUMN picked_by TEXT; + +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; +ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL; +ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number); diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql new file mode 100644 index 000000000000..160af44c221c --- /dev/null +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE tee_verifier_input_producer_jobs DROP COLUMN picked_by; + +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; +UPDATE tee_proof_generation_details SET tee_type = 'sgx' WHERE tee_type IS NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type SET NOT NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number SET NOT NULL; +ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number, tee_type); diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs index 1e852e3f6364..d22541620f2a 100644 --- a/core/lib/dal/src/models/mod.rs +++ b/core/lib/dal/src/models/mod.rs @@ -11,6 +11,7 @@ pub mod storage_log; pub mod storage_oracle_info; pub mod storage_protocol_version; pub mod storage_sync; +pub mod storage_tee_proof; pub mod storage_transaction; pub mod storage_verification_request; pub mod storage_witness_job_info; diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs new file mode 100644 index 000000000000..5c93361e7df1 --- /dev/null +++ b/core/lib/dal/src/models/storage_tee_proof.rs @@ -0,0 +1,10 @@ +use chrono::NaiveDateTime; + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTeeProof { + pub pubkey: Option>, + pub signature: Option>, + pub proof: Option>, + pub updated_at: NaiveDateTime, + pub attestation: Option>, +} diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 43e86116092b..2bd73323eb10 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,14 +1,16 @@ +#![doc = include_str!("../doc/TeeProofGenerationDal.md")] use std::time::Duration; use zksync_db_connection::{ - connection::Connection, - error::DalResult, - instrument::{InstrumentExt, Instrumented}, + connection::Connection, error::DalResult, instrument::Instrumented, utils::pg_interval_from_duration, }; use zksync_types::{tee_types::TeeType, L1BatchNumber}; -use crate::Core; +use crate::{ + models::storage_tee_proof::StorageTeeProof, + tee_verifier_input_producer_dal::TeeVerifierInputProducerJobStatus, Core, +}; #[derive(Debug)] pub struct TeeProofGenerationDal<'a, 'c> { @@ -16,12 +18,13 @@ pub struct TeeProofGenerationDal<'a, 'c> { } impl TeeProofGenerationDal<'_, '_> { - pub async fn get_next_block_to_be_proven( + pub async fn get_next_batch_to_be_proven( &mut self, + tee_type: TeeType, processing_timeout: Duration, ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); - let result: Option = sqlx::query!( + let query = sqlx::query!( r#" UPDATE tee_proof_generation_details SET @@ -29,19 +32,20 @@ impl TeeProofGenerationDal<'_, '_> { updated_at = NOW(), prover_taken_at = NOW() WHERE - l1_batch_number = ( + tee_type = $1 + AND l1_batch_number = ( SELECT proofs.l1_batch_number FROM 
tee_proof_generation_details AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE - inputs.status = 'Successful' + inputs.status = $2 AND ( proofs.status = 'ready_to_be_proven' OR ( proofs.status = 'picked_by_prover' - AND proofs.prover_taken_at < NOW() - $1::INTERVAL + AND proofs.prover_taken_at < NOW() - $3::INTERVAL ) ) ORDER BY @@ -54,48 +58,53 @@ impl TeeProofGenerationDal<'_, '_> { RETURNING tee_proof_generation_details.l1_batch_number "#, + &tee_type.to_string(), + TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus, &processing_timeout, - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + ); + let batch_number = Instrumented::new("get_next_batch_to_be_proven") + .with_arg("tee_type", &tee_type) + .with_arg("processing_timeout", &processing_timeout) + .with(query) + .fetch_optional(self.storage) + .await? + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - Ok(result) + Ok(batch_number) } pub async fn save_proof_artifacts_metadata( &mut self, - block_number: L1BatchNumber, - signature: &[u8], + batch_number: L1BatchNumber, + tee_type: TeeType, pubkey: &[u8], + signature: &[u8], proof: &[u8], - tee_type: TeeType, ) -> DalResult<()> { let query = sqlx::query!( r#" UPDATE tee_proof_generation_details SET + tee_type = $1, status = 'generated', - signature = $1, pubkey = $2, - proof = $3, - tee_type = $4, + signature = $3, + proof = $4, updated_at = NOW() WHERE l1_batch_number = $5 "#, - signature, + tee_type.to_string(), pubkey, + signature, proof, - tee_type.to_string(), - i64::from(block_number.0) + i64::from(batch_number.0) ); let instrumentation = Instrumented::new("save_proof_artifacts_metadata") - .with_arg("signature", &signature) + .with_arg("tee_type", &tee_type) .with_arg("pubkey", &pubkey) - .with_arg("proof", &proof) - .with_arg("tee_type", &tee_type); + .with_arg("signature", &signature) + .with_arg("proof", &proof); let result = instrumentation .clone() .with(query) @@ -103,7 +112,8 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Updating TEE proof for a non-existent batch number is not allowed" + "Updating TEE proof for a non-existent batch number {} is not allowed", + batch_number )); return Err(err); } @@ -113,53 +123,33 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn insert_tee_proof_generation_job( &mut self, - block_number: L1BatchNumber, + batch_number: L1BatchNumber, + tee_type: TeeType, ) -> DalResult<()> { - let block_number = i64::from(block_number.0); - sqlx::query!( + let batch_number = i64::from(batch_number.0); + let query = sqlx::query!( r#" INSERT INTO - tee_proof_generation_details (l1_batch_number, status, created_at, updated_at) + tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at) VALUES - ($1, 'ready_to_be_proven', NOW(), NOW()) - ON CONFLICT (l1_batch_number) DO NOTHING + ($1, $2, 'ready_to_be_proven', NOW(), NOW()) + ON CONFLICT (l1_batch_number, tee_type) DO NOTHING "#, - block_number, - ) - .instrument("create_tee_proof_generation_details") - .with_arg("l1_batch_number", &block_number) - .report_latency() - .execute(self.storage) - .await?; + batch_number, + tee_type.to_string(), + ); + let instrumentation = Instrumented::new("insert_tee_proof_generation_job") + .with_arg("l1_batch_number", &batch_number) + .with_arg("tee_type", &tee_type); + 
instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; Ok(()) } - pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { - let result: Option = sqlx::query!( - r#" - SELECT - proofs.l1_batch_number - FROM - tee_proof_generation_details AS proofs - JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number - WHERE - inputs.status = 'Successful' - AND proofs.status = 'ready_to_be_proven' - ORDER BY - proofs.l1_batch_number ASC - LIMIT - 1 - "#, - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - - Ok(result) - } - pub async fn save_attestation(&mut self, pubkey: &[u8], attestation: &[u8]) -> DalResult<()> { let query = sqlx::query!( r#" @@ -175,18 +165,76 @@ impl TeeProofGenerationDal<'_, '_> { let instrumentation = Instrumented::new("save_attestation") .with_arg("pubkey", &pubkey) .with_arg("attestation", &attestation); - let result = instrumentation + instrumentation .clone() .with(query) .execute(self.storage) .await?; - if result.rows_affected() == 0 { - let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE attestation: given pubkey already has an attestation assigned" - )); - return Err(err); - } Ok(()) } + + pub async fn get_tee_proofs( + &mut self, + batch_number: L1BatchNumber, + tee_type: Option, + ) -> DalResult> { + let query = format!( + r#" + SELECT + tp.pubkey, + tp.signature, + tp.proof, + tp.updated_at, + ta.attestation + FROM + tee_proof_generation_details tp + LEFT JOIN + tee_attestations ta ON tp.pubkey = ta.pubkey + WHERE + tp.l1_batch_number = $1 + AND tp.status = 'generated' + {} + ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC + "#, + tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $2".to_string()) + ); + + let mut query = sqlx::query_as(&query).bind(i64::from(batch_number.0)); + + if let Some(tee_type) = tee_type { + query = query.bind(tee_type.to_string()); + } + + let proofs: Vec = query.fetch_all(self.storage.conn()).await.unwrap(); + + Ok(proofs) + } + + pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { + let query = sqlx::query!( + r#" + SELECT + proofs.l1_batch_number + FROM + tee_proof_generation_details AS proofs + JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number + WHERE + inputs.status = $1 + AND proofs.status = 'ready_to_be_proven' + ORDER BY + proofs.l1_batch_number ASC + LIMIT + 1 + "#, + TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus + ); + let batch_number = Instrumented::new("get_oldest_unpicked_batch") + .with(query) + .fetch_optional(self.storage) + .await? 
+ .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + + Ok(batch_number) + } } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index e0b617ee3bea..e4fe566618b8 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; use zksync_types::{ protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, + tee_types::TeeType, L1BatchNumber, }; @@ -52,7 +53,10 @@ pub enum RegisterTeeAttestationResponse { #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationDataRequest {} -pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; +#[derive(Debug, Serialize, Deserialize)] +pub struct TeeProofGenerationDataRequest { + pub tee_type: TeeType, +} #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofRequest { diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index 751de9bd7040..e6c84ecec537 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -3,6 +3,7 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value; use strum::Display; use zksync_basic_types::{ + tee_types::TeeType, web3::{AccessList, Bytes, Index}, L1BatchNumber, H160, H2048, H256, H64, U256, U64, }; @@ -810,6 +811,18 @@ pub struct Proof { pub storage_proof: Vec, } +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TeeProof { + pub l1_batch_number: L1BatchNumber, + pub tee_type: Option, + pub pubkey: Option>, + pub signature: Option>, + pub proof: Option>, + pub proved_at: DateTime, + pub attestation: Option>, +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct TransactionDetailedResult { diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs index 4996813a9855..d2a6e29c7083 100644 --- a/core/lib/web3_decl/src/namespaces/unstable.rs +++ b/core/lib/web3_decl/src/namespaces/unstable.rs @@ -1,7 +1,11 @@ #[cfg_attr(not(feature = "server"), allow(unused_imports))] use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; -use zksync_types::{api::TransactionExecutionInfo, H256}; +use zksync_types::{ + api::{TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, H256, +}; use crate::client::{ForNetwork, L2}; @@ -20,4 +24,11 @@ pub trait UnstableNamespace { &self, hash: H256, ) -> RpcResult>; + + #[method(name = "getTeeProofs")] + async fn tee_proofs( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> RpcResult>; } diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs index 6abaa718a050..91330aa7d949 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs @@ -1,4 +1,8 @@ -use zksync_types::{api::TransactionExecutionInfo, H256}; +use zksync_types::{ + api::{TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, H256, +}; use zksync_web3_decl::{ jsonrpsee::core::{async_trait, RpcResult}, namespaces::UnstableNamespaceServer, @@ -16,4 +20,14 @@ impl UnstableNamespaceServer for UnstableNamespace { .await .map_err(|err| self.current_method().map_err(err)) } + + async fn tee_proofs( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> RpcResult> { + 
+        self.get_tee_proofs_impl(l1_batch_number, tee_type)
+            .await
+            .map_err(|err| self.current_method().map_err(err))
+    }
 }
diff --git a/core/node/api_server/src/web3/namespaces/unstable.rs b/core/node/api_server/src/web3/namespaces/unstable.rs
index b46ecd6dc530..783088cdc36a 100644
--- a/core/node/api_server/src/web3/namespaces/unstable.rs
+++ b/core/node/api_server/src/web3/namespaces/unstable.rs
@@ -1,5 +1,10 @@
+use chrono::{DateTime, Utc};
 use zksync_dal::{CoreDal, DalError};
-use zksync_types::api::TransactionExecutionInfo;
+use zksync_types::{
+    api::{TeeProof, TransactionExecutionInfo},
+    tee_types::TeeType,
+    L1BatchNumber,
+};
 use zksync_web3_decl::{error::Web3Error, types::H256};
 
 use crate::web3::{backend_jsonrpsee::MethodTracer, RpcState};
@@ -30,4 +35,28 @@ impl UnstableNamespace {
             .map_err(DalError::generalize)?
             .map(|execution_info| TransactionExecutionInfo { execution_info }))
     }
+
+    pub async fn get_tee_proofs_impl(
+        &self,
+        l1_batch_number: L1BatchNumber,
+        tee_type: Option<TeeType>,
+    ) -> Result<Vec<TeeProof>, Web3Error> {
+        let mut storage = self.state.acquire_connection().await?;
+        Ok(storage
+            .tee_proof_generation_dal()
+            .get_tee_proofs(l1_batch_number, tee_type)
+            .await
+            .map_err(DalError::generalize)?
+            .into_iter()
+            .map(|proof| TeeProof {
+                l1_batch_number,
+                tee_type,
+                pubkey: proof.pubkey,
+                signature: proof.signature,
+                proof: proof.proof,
+                proved_at: DateTime::<Utc>::from_naive_utc_and_offset(proof.updated_at, Utc),
+                attestation: proof.attestation,
+            })
+            .collect::<Vec<_>>())
+    }
 }
diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs
index 243c9e06cfcc..d85591dd2c90 100644
--- a/core/node/proof_data_handler/src/tee_request_processor.rs
+++ b/core/node/proof_data_handler/src/tee_request_processor.rs
@@ -49,9 +49,10 @@ impl TeeRequestProcessor {
         let l1_batch_number_result = connection
             .tee_proof_generation_dal()
-            .get_next_block_to_be_proven(self.config.proof_generation_timeout())
+            .get_next_batch_to_be_proven(request.tee_type, self.config.proof_generation_timeout())
             .await
             .map_err(RequestProcessorError::Dal)?;
+
         let l1_batch_number = match l1_batch_number_result {
             Some(number) => number,
             None => return Ok(Json(TeeProofGenerationDataResponse(None))),
         };
@@ -63,9 +64,9 @@ impl TeeRequestProcessor {
             .await
             .map_err(RequestProcessorError::ObjectStore)?;
 
-        Ok(Json(TeeProofGenerationDataResponse(Some(Box::new(
-            tee_verifier_input,
-        )))))
+        let response = TeeProofGenerationDataResponse(Some(Box::new(tee_verifier_input)));
+
+        Ok(Json(response))
     }
 
     pub(crate) async fn submit_proof(
@@ -82,16 +83,16 @@ impl TeeRequestProcessor {
         let mut dal = connection.tee_proof_generation_dal();
 
         tracing::info!(
-            "Received proof {:?} for block number: {:?}",
+            "Received proof {:?} for batch number: {:?}",
             proof,
             l1_batch_number
         );
         dal.save_proof_artifacts_metadata(
             l1_batch_number,
-            &proof.0.signature,
+            proof.0.tee_type,
             &proof.0.pubkey,
+            &proof.0.signature,
             &proof.0.proof,
-            proof.0.tee_type,
         )
         .await
         .map_err(RequestProcessorError::Dal)?;
diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs
index 1fbe563d2d28..88d4930e6920 100644
--- a/core/node/proof_data_handler/src/tests.rs
+++ b/core/node/proof_data_handler/src/tests.rs
@@ -18,7 +18,7 @@ use zksync_prover_interface::{
     api::SubmitTeeProofRequest,
     inputs::{TeeVerifierInput, V1TeeVerifierInput, WitnessInputMerklePaths},
 };
-use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256};
+use zksync_types::{commitment::L1BatchCommitmentMode, tee_types::TeeType, L1BatchNumber, H256};
 
 use crate::create_proof_processing_router;
 
@@ -94,7 +94,7 @@ async fn request_tee_proof_inputs() {
         },
         L1BatchCommitmentMode::Rollup,
     );
-    let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap());
+    let req_body = Body::from(serde_json::to_vec(&json!({ "tee_type": "Sgx" })).unwrap());
     let response = app
         .oneshot(
             Request::builder()
@@ -180,6 +180,26 @@ async fn submit_tee_proof() {
         .unwrap();
 
     assert!(oldest_batch_number.is_none());
+
+    // there should be one SGX proof in the db now
+
+    let proofs = proof_db_conn
+        .tee_proof_generation_dal()
+        .get_tee_proofs(batch_number, Some(TeeType::Sgx))
+        .await
+        .unwrap();
+
+    assert_eq!(proofs.len(), 1);
+
+    let proof = &proofs[0];
+
+    assert_eq!(proof.proof.as_ref().unwrap(), &tee_proof_request.0.proof);
+    assert_eq!(proof.attestation.as_ref().unwrap(), &attestation);
+    assert_eq!(
+        proof.signature.as_ref().unwrap(),
+        &tee_proof_request.0.signature
+    );
+    assert_eq!(proof.pubkey.as_ref().unwrap(), &tee_proof_request.0.pubkey);
 }
 
 // Mock SQL db with information about the status of the TEE proof generation
@@ -215,7 +235,7 @@ async fn mock_tee_batch_status(
 
     // mock SQL table with relevant information about the status of TEE proof generation ('ready_to_be_proven')
     proof_dal
-        .insert_tee_proof_generation_job(batch_number)
+        .insert_tee_proof_generation_job(batch_number, TeeType::Sgx)
        .await
         .expect("Failed to insert tee_proof_generation_job");
 
diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs
index 0cd28ee5ce79..abd70542a42f 100644
--- a/core/node/tee_verifier_input_producer/src/lib.rs
+++ b/core/node/tee_verifier_input_producer/src/lib.rs
@@ -19,7 +19,7 @@ use zksync_prover_interface::inputs::{
 };
 use zksync_queued_job_processor::JobProcessor;
 use zksync_tee_verifier::Verify;
-use zksync_types::{L1BatchNumber, L2ChainId};
+use zksync_types::{tee_types::TeeType, L1BatchNumber, L2ChainId};
 use zksync_utils::u256_to_h256;
 use zksync_vm_utils::storage::L1BatchParamsProvider;
 
@@ -241,7 +241,7 @@ impl JobProcessor for TeeVerifierInputProducer {
             .context("failed to mark job as successful for TeeVerifierInputProducer")?;
         transaction
             .tee_proof_generation_dal()
-            .insert_tee_proof_generation_job(job_id)
+            .insert_tee_proof_generation_job(job_id, TeeType::Sgx)
             .await?;
         transaction
             .commit()